diff --git a/backend/.env.example b/backend/.env.example
index 4b3afe9..f170ce4 100644
--- a/backend/.env.example
+++ b/backend/.env.example
@@ -12,7 +12,7 @@ SECRET_KEY = "your_secret_key"
ALGORITHM = "HS256"
ACCESS_TOKEN_EXPIRE_MINUTES = "720"
-# SEARCHE ENGINES TO USE - FUTURE FEATURE - LEAVE EMPTY FOR NOW
+# SEARCH ENGINES TO USE FOR WEB SEARCH
TAVILY_API_KEY=""
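
A minimal sketch of how the TAVILY_API_KEY setting introduced above might be consumed, assuming the backend wires it through LangChain's community Tavily search tool (which reads the key from the TAVILY_API_KEY environment variable); the query, result limit, and printing are illustrative only and not taken from this repository:

    import os
    from langchain_community.tools.tavily_search import TavilySearchResults

    # The Tavily tool picks up TAVILY_API_KEY from the environment,
    # matching the variable defined in backend/.env.example above.
    os.environ.setdefault("TAVILY_API_KEY", "your_tavily_api_key")  # placeholder

    search = TavilySearchResults(max_results=3)  # illustrative result limit
    results = search.invoke("example web search query")  # hypothetical query
    for result in results:  # each result is a dict with "url" and "content" keys
        print(result["url"], "-", result["content"][:80])
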
diff --git a/backend/DataExample.py b/backend/DataExample.py
deleted file mode 100644
index 784ea52..0000000
--- a/backend/DataExample.py
+++ /dev/null
@@ -1,3 +0,0 @@
-from langchain_core.documents import Document
-
-examples = [Document(metadata={'BrowsingSessionId': '48573766', 'VisitedWebPageURL': 'https://www.google.com/search?q=appropiate&oq=appropiate&gs_lcrp=EgZjaHJvbWUyBggAEEUYOTIMCAEQABgKGLEDGIAEMgkIAhAAGAoYgAQyDAgDEAAYChixAxiABDIMCAQQABgKGLEDGIAEMgwIBRAAGAoYsQMYgAQyCQgGEAAYChiABDIPCAcQABgKGIMBGLEDGIAEMgwICBAAGAoYsQMYgAQyDwgJEC4YChiDARixAxiABNIBBzU0MGowajeoAgCwAgA&sourceid=chrome&ie=UTF-8', 'VisitedWebPageTitle': 'appropriate - Google Search', 'VisitedWebPageDateWithTimeInISOString': '2024-08-13T16:45:49-07:00', 'VisitedWebPageReffererURL': 'START', 'VisitedWebPageVisitDurationInMilliseconds': 737481, 'VisitedWebPageContent': None}, page_content='\n# Filters and Topics\n\nAll [Images](chrome-extension://jbckbhoeahlefbjdofonogamgbjjooca/search?sca_esv=23b8fd744dd7098e&sca_upv=1&sxsrf=ADLYWILHkJTxrhDEmWv9tz7lVIU3xcFlcA:1723592009993&q=appropriate&udm=2&fbs=AEQNm0Aa4sjWe7Rqy32pFwRj0UkWd8nbOJfsBGGB5IQQO6L3J_86uWOeqwdnV0yaSF-x2jogM63VUdBhAMVqo6r6ESHk5nqQaIu-CtMsUMpSSNn0pSGCVZhi29x2o-Ry682PHejf9PCKN3QnlhCEsRTGdLyEMu5J9qKBk0VUJ_lLg6DoHZBcDVsrfaS_0rz_0qxz8T1g2VAKGGXFLu3uTnuhAfZALh0CWg&sa=X&ved=2ahUKEwjVyvjLkPOHAxXjGjQIHbezPEIQtKgLegQIEhAB) [Videos](chrome-extension://jbckbhoeahlefbjdofonogamgbjjooca/search?sca_esv=23b8fd744dd7098e&sca_upv=1&sxsrf=ADLYWILHkJTxrhDEmWv9tz7lVIU3xcFlcA:1723592009993&q=appropriate&tbm=vid&source=lnms&fbs=AEQNm0Aa4sjWe7Rqy32pFwRj0UkWd8nbOJfsBGGB5IQQO6L3J_86uWOeqwdnV0yaSF-x2jogM63VUdBhAMVqo6r6ESHk5nqQaIu-CtMsUMpSSNn0pSGCVZhi29x2o-Ry682PHejf9PCKN3QnlhCEsRTGdLyEMu5J9qKBk0VUJ_lLg6DoHZBcDVsrfaS_0rz_0qxz8T1g2VAKGGXFLu3uTnuhAfZALh0CWg&sa=X&ved=2ahUKEwjVyvjLkPOHAxXjGjQIHbezPEIQ0pQJegQIERAB) [Shopping](chrome-extension://jbckbhoeahlefbjdofonogamgbjjooca/search?sca_esv=23b8fd744dd7098e&sca_upv=1&sxsrf=ADLYWILHkJTxrhDEmWv9tz7lVIU3xcFlcA:1723592009993&q=appropriate&udm=28&fbs=AEQNm0Aa4sjWe7Rqy32pFwRj0UkWd8nbOJfsBGGB5IQQO6L3J_86uWOeqwdnV0yaSF-x2jogM63VUdBhAMVqo6r6ESHk5nqQaIu-CtMsUMpSSNn0pSGCVZhi29x2o-Ry682PHejf9PCKN3QnlhCEsRTGdLyEMu5J9qKBk0VUJ_lLg6DoHZBcDVsrfaS_0rz_0qxz8T1g2VAKGGXFLu3uTnuhAfZALh0CWg&ved=1t:220175&ictx=111) [Web](chrome-extension://jbckbhoeahlefbjdofonogamgbjjooca/search?sca_esv=23b8fd744dd7098e&sca_upv=1&sxsrf=ADLYWILHkJTxrhDEmWv9tz7lVIU3xcFlcA:1723592009993&q=appropriate&udm=14&fbs=AEQNm0Aa4sjWe7Rqy32pFwRj0UkWd8nbOJfsBGGB5IQQO6L3J_86uWOeqwdnV0yaSF-x2jogM63VUdBhAMVqo6r6ESHk5nqQaIu-CtMsUMpSSNn0pSGCVZhi29x2o-Ry682PHejf9PCKN3QnlhCEsRTGdLyEMu5J9qKBk0VUJ_lLg6DoHZBcDVsrfaS_0rz_0qxz8T1g2VAKGGXFLu3uTnuhAfZALh0CWg&sa=X&ved=2ahUKEwjVyvjLkPOHAxXjGjQIHbezPEIQs6gLegQIEBAB) [News](chrome-extension://jbckbhoeahlefbjdofonogamgbjjooca/search?sca_esv=23b8fd744dd7098e&sca_upv=1&sxsrf=ADLYWILHkJTxrhDEmWv9tz7lVIU3xcFlcA:1723592009993&q=appropriate&tbm=nws&source=lnms&fbs=AEQNm0Aa4sjWe7Rqy32pFwRj0UkWd8nbOJfsBGGB5IQQO6L3J_86uWOeqwdnV0yaSF-x2jogM63VUdBhAMVqo6r6ESHk5nqQaIu-CtMsUMpSSNn0pSGCVZhi29x2o-Ry682PHejf9PCKN3QnlhCEsRTGdLyEMu5J9qKBk0VUJ_lLg6DoHZBcDVsrfaS_0rz_0qxz8T1g2VAKGGXFLu3uTnuhAfZALh0CWg&sa=X&ved=2ahUKEwjVyvjLkPOHAxXjGjQIHbezPEIQ0pQJegQIDRAB) [Forums](chrome-extension://jbckbhoeahlefbjdofonogamgbjjooca/search?sca_esv=23b8fd744dd7098e&sca_upv=1&sxsrf=ADLYWILHkJTxrhDEmWv9tz7lVIU3xcFlcA:1723592009993&q=appropriate&udm=18&fbs=AEQNm0Aa4sjWe7Rqy32pFwRj0UkWd8nbOJfsBGGB5IQQO6L3J_86uWOeqwdnV0yaSF-x2jogM63VUdBhAMVqo6r6ESHk5nqQaIu-CtMsUMpSSNn0pSGCVZhi29x2o-Ry682PHejf9PCKN3QnlhCEsRTGdLyEMu5J9qKBk0VUJ_lLg6DoHZBcDVsrfaS_0rz_0qxz8T1g2VAKGGXFLu3uTnuhAfZALh0CWg&sa=X&ved=2ahUKEwjVyvjLkPOHAxXjGjQIHbezPEIQs6gLegQIDBAB) More 
[Books](chrome-extension://jbckbhoeahlefbjdofonogamgbjjooca/search?sca_esv=23b8fd744dd7098e&sca_upv=1&sxsrf=ADLYWILHkJTxrhDEmWv9tz7lVIU3xcFlcA:1723592009993&q=appropriate&tbm=bks&source=lnms&fbs=AEQNm0Aa4sjWe7Rqy32pFwRj0UkWd8nbOJfsBGGB5IQQO6L3J_86uWOeqwdnV0yaSF-x2jogM63VUdBhAMVqo6r6ESHk5nqQaIu-CtMsUMpSSNn0pSGCVZhi29x2o-Ry682PHejf9PCKN3QnlhCEsRTGdLyEMu5J9qKBk0VUJ_lLg6DoHZBcDVsrfaS_0rz_0qxz8T1g2VAKGGXFLu3uTnuhAfZALh0CWg&sa=X&ved=2ahUKEwjVyvjLkPOHAxXjGjQIHbezPEIQ0pQJegQIeBAB) [Maps](https://maps.google.com/maps?sca_esv=23b8fd744dd7098e&sca_upv=1&output=search&q=appropriate&source=lnms&fbs=AEQNm0Aa4sjWe7Rqy32pFwRj0UkWd8nbOJfsBGGB5IQQO6L3J_86uWOeqwdnV0yaSF-x2jogM63VUdBhAMVqo6r6ESHk5nqQaIu-CtMsUMpSSNn0pSGCVZhi29x2o-Ry682PHejf9PCKN3QnlhCEsRTGdLyEMu5J9qKBk0VUJ_lLg6DoHZBcDVsrfaS_0rz_0qxz8T1g2VAKGGXFLu3uTnuhAfZALh0CWg&entry=mc&ved=1t:200715&ictx=111) [Flights](chrome-extension://jbckbhoeahlefbjdofonogamgbjjooca/travel/flights?sca_esv=23b8fd744dd7098e&sca_upv=1&output=search&q=appropriate&source=lnms&fbs=AEQNm0Aa4sjWe7Rqy32pFwRj0UkWd8nbOJfsBGGB5IQQO6L3J_86uWOeqwdnV0yaSF-x2jogM63VUdBhAMVqo6r6ESHk5nqQaIu-CtMsUMpSSNn0pSGCVZhi29x2o-Ry682PHejf9PCKN3QnlhCEsRTGdLyEMu5J9qKBk0VUJ_lLg6DoHZBcDVsrfaS_0rz_0qxz8T1g2VAKGGXFLu3uTnuhAfZALh0CWg&ved=1t:200715&ictx=111) Tools\nAny time Any time [Past hour](chrome-extension://jbckbhoeahlefbjdofonogamgbjjooca/search?q=appropriate&sca_esv=23b8fd744dd7098e&sca_upv=1&sxsrf=ADLYWILHkJTxrhDEmWv9tz7lVIU3xcFlcA:1723592009993&source=lnt&tbs=qdr:h&sa=X&ved=2ahUKEwjVyvjLkPOHAxXjGjQIHbezPEIQpwV6BAgHEAc) [Past 24 hours](chrome-extension://jbckbhoeahlefbjdofonogamgbjjooca/search?q=appropriate&sca_esv=23b8fd744dd7098e&sca_upv=1&sxsrf=ADLYWILHkJTxrhDEmWv9tz7lVIU3xcFlcA:1723592009993&source=lnt&tbs=qdr:d&sa=X&ved=2ahUKEwjVyvjLkPOHAxXjGjQIHbezPEIQpwV6BAgHEAg) [Past week](chrome-extension://jbckbhoeahlefbjdofonogamgbjjooca/search?q=appropriate&sca_esv=23b8fd744dd7098e&sca_upv=1&sxsrf=ADLYWILHkJTxrhDEmWv9tz7lVIU3xcFlcA:1723592009993&source=lnt&tbs=qdr:w&sa=X&ved=2ahUKEwjVyvjLkPOHAxXjGjQIHbezPEIQpwV6BAgHEAk) [Past month](chrome-extension://jbckbhoeahlefbjdofonogamgbjjooca/search?q=appropriate&sca_esv=23b8fd744dd7098e&sca_upv=1&sxsrf=ADLYWILHkJTxrhDEmWv9tz7lVIU3xcFlcA:1723592009993&source=lnt&tbs=qdr:m&sa=X&ved=2ahUKEwjVyvjLkPOHAxXjGjQIHbezPEIQpwV6BAgHEAo) [Past year](chrome-extension://jbckbhoeahlefbjdofonogamgbjjooca/search?q=appropriate&sca_esv=23b8fd744dd7098e&sca_upv=1&sxsrf=ADLYWILHkJTxrhDEmWv9tz7lVIU3xcFlcA:1723592009993&source=lnt&tbs=qdr:y&sa=X&ved=2ahUKEwjVyvjLkPOHAxXjGjQIHbezPEIQpwV6BAgHEAs) Custom range... 
Custom date range From To Go All results All results [Verbatim](chrome-extension://jbckbhoeahlefbjdofonogamgbjjooca/search?q=appropiate&sca_esv=23b8fd744dd7098e&sca_upv=1&sxsrf=ADLYWILHkJTxrhDEmWv9tz7lVIU3xcFlcA:1723592009993&source=lnt&tbs=li:1&sa=X&ved=2ahUKEwjVyvjLkPOHAxXjGjQIHbezPEIQpwV6BAgHEBM) [Advanced Search](https://www.google.com/advanced_search?q=appropiate&gs_lcrp=EgZjaHJvbWUyBggAEEUYOTIMCAEQABgKGLEDGIAEMgkIAhAAGAoYgAQyDAgDEAAYChixAxiABDIMCAQQABgKGLEDGIAEMgwIBRAAGAoYsQMYgAQyCQgGEAAYChiABDIPCAcQABgKGIMBGLEDGIAEMgwICBAAGAoYsQMYgAQyDwgJEC4YChiDARixAxiABNIBBzU0MGowajeoAgCwAgA&ie=UTF-8) About 5,600,000,000 results (0.24 seconds) Ctrl+Shift+X to select\n# Search settings\n\n Search history Saving Delete last 15 min SafeSearch Blurring on Language English Dark theme Device default [More settings](chrome-extension://jbckbhoeahlefbjdofonogamgbjjooca/preferences?hl=en&prev=https://www.google.com/search?q%3Dappropriate%26sca_esv%3D23b8fd744dd7098e%26sca_upv%3D1%26sxsrf%3DADLYWILHkJTxrhDEmWv9tz7lVIU3xcFlcA:1723592009993) Send feedback [Help](https://support.google.com/websearch/?p=dsrp_search_hc&hl=en) • [Privacy](https://policies.google.com/privacy?hl=en&fg=1) • [Terms](https://policies.google.com/terms?hl=en&fg=1) Choose what you’re giving feedback on Or give [general feedback](chrome-extension://jbckbhoeahlefbjdofonogamgbjjooca/popup.html#) appropriate Send feedback Overview Similar and opposite words Usage examples\n# Search Results\n\n# Main Results\n\n## Loading\n\nSomething went wrong. TRY AGAIN Choose what you’re giving feedback on Or give [general feedback](chrome-extension://jbckbhoeahlefbjdofonogamgbjjooca/popup.html#) Dictionary Definitions from [Oxford Languages](https://languages.oup.com/google-dictionary-en) · [Learn more](https://support.google.com/websearch/answer/10106608?hl=en) ap·pro·pri·ate *adjective* adjective : **appropriate** / əˈprōprēət /\n1. suitable or proper in the circumstances. "a measure **appropriate to** a wartime economy" h Similar: suitable proper fitting apt relevant connected pertinent apposite applicable germane material significant right congruous to the point to the purpose convenient expedient favorable auspicious propitious opportune felicitous timely well judged well timed seemly befitting deserved ad rem appurtenant meet seasonable h Opposite: inappropriate irrelevant\n\n*verb* verb : **appropriate** ; 3rd person present : **appropriates** ; past tense : **appropriated** ; past participle : **appropriated** ; gerund or present participle : **appropriating** / əˈprōprēˌāt /\n1. 1 . take (something) for one\'s own use, typically without the owner\'s permission. "his images have been appropriated by advertisers" h Similar: seize commandeer expropriate annex arrogate sequestrate sequester take possession of take over assume secure acquire wrest usurp claim lay claim to hijack steal take misappropriate thieve pilfer pocket purloin make off with embezzle swipe nab rip off lift filch snaffle snitch bag walk off/away with liberate pinch nick half-inch whip knock off peculate defalcate abstract plagiarize copy reproduce poach bootleg infringe the copyright of pirate crib\n2. 2 . devote (money or assets) to a special purpose. "there can be problems in **appropriating** funds **for** legal expenses" h Similar: allocate assign allot earmark set apart/aside devote apportion budget\n\nOrigin late Middle English: from late Latin *appropriatus* , past participle of *appropriare* ‘make one\'s own’, from *ad-* ‘to’ + *proprius* ‘own, proper’. 
Use over time for: appropriate  Feedback More definitions Show less People also ask What is the meaning of the word appropriate? especially suitable or compatible : **especially suitable or compatible** : fitting. an appropriate response. remarks appropriate to the occasion. \n### Appropriate Definition & Meaning - Merriam-Webster\n\n Merriam-Webster https://www.merriam-webster.com › dictionary › appro... Merriam-Webster https://www.merriam-webster.com › dictionary › appro... Search for: [What is the meaning of the word appropriate?](chrome-extension://jbckbhoeahlefbjdofonogamgbjjooca/search?sca_esv=23b8fd744dd7098e&sca_upv=1&sxsrf=ADLYWILHkJTxrhDEmWv9tz7lVIU3xcFlcA:1723592009993&q=What+is+the+meaning+of+the+word+appropriate%3F&sa=X&ved=2ahUKEwjVyvjLkPOHAxXjGjQIHbezPEIQzmd6BAgmEAY) What does so appropriate mean? **suitable or fitting for a particular purpose, person, occasion, etc.** : an appropriate example; an appropriate dress. Synonyms: meet, pertinent, becoming, due, proper, suited, felicitous, apt, befitting. Antonyms: inept, inappropriate, unsuitable. \n### APPROPRIATE Definition & Meaning - Dictionary.com\n\n Dictionary.com https://www.dictionary.com › browse › appropriate Dictionary.com https://www.dictionary.com › browse › appropriate Search for: [What does so appropriate mean?](chrome-extension://jbckbhoeahlefbjdofonogamgbjjooca/search?sca_esv=23b8fd744dd7098e&sca_upv=1&sxsrf=ADLYWILHkJTxrhDEmWv9tz7lVIU3xcFlcA:1723592009993&q=What+does+so+appropriate+mean%3F&sa=X&ved=2ahUKEwjVyvjLkPOHAxXjGjQIHbezPEIQzmd6BAgqEAY) What is the other meaning of appropriate? Some common synonyms of appropriate are apt, felicitous, fitting, fit, happy, meet, proper, and suitable. While all these words mean " **right with respect to some end, need, use, or circumstance** ," appropriate implies eminent or distinctive fitness. an appropriate gift. \n### APPROPRIATE Synonyms: 171 Similar and Opposite Words\n\n Merriam-Webster https://www.merriam-webster.com › thesaurus › approp... Merriam-Webster https://www.merriam-webster.com › thesaurus › approp... Search for: [What is the other meaning of appropriate?](chrome-extension://jbckbhoeahlefbjdofonogamgbjjooca/search?sca_esv=23b8fd744dd7098e&sca_upv=1&sxsrf=ADLYWILHkJTxrhDEmWv9tz7lVIU3xcFlcA:1723592009993&q=What+is+the+other+meaning+of+appropriate%3F&sa=X&ved=2ahUKEwjVyvjLkPOHAxXjGjQIHbezPEIQzmd6BAgpEAY) What is the meaning of the word appropriative? Definitions of appropriative. adjective. **of or relating to or given to the act of taking for yourself**. \n### Appropriative - Definition, Meaning & Synonyms - Vocabulary.com\n\n Vocabulary.com https://www.vocabulary.com › dictionary › appropriative Vocabulary.com https://www.vocabulary.com › dictionary › appropriative Search for: [What is the meaning of the word appropriative?](chrome-extension://jbckbhoeahlefbjdofonogamgbjjooca/search?sca_esv=23b8fd744dd7098e&sca_upv=1&sxsrf=ADLYWILHkJTxrhDEmWv9tz7lVIU3xcFlcA:1723592009993&q=What+is+the+meaning+of+the+word+appropriative%3F&sa=X&ved=2ahUKEwjVyvjLkPOHAxXjGjQIHbezPEIQzmd6BAgnEAY) Feedback \n### Appropriate Definition & Meaning\n\n Merriam-Webster https://www.merriam-webster.com › dictionary › appro... Merriam-Webster https://www.merriam-webster.com › dictionary › appro... 7 days ago — suitable implies an answering to requirements or demands. clothes suitable for camping. 
Things to know When to use … when to use appropriate appropriate adjective (CORRECT)\n\n**correct or right for a particular situation or occasion** : Punishment should be appropriate to the crime. I don\'t have any appropriate clothes. \n### APPROPRIATE \\| English meaning - Cambridge Dictionary\n\n cambridge.org https://dictionary.cambridge.org › dictionary › appropriate cambridge.org https://dictionary.cambridge.org › dictionary › appropriate [when to use appropriate](chrome-extension://jbckbhoeahlefbjdofonogamgbjjooca/search?sca_esv=23b8fd744dd7098e&sca_upv=1&sxsrf=ADLYWILHkJTxrhDEmWv9tz7lVIU3xcFlcA:1723592009993&q=when+to+use+appropriate&sa=X&ved=2ahUKEwjVyvjLkPOHAxXjGjQIHbezPEIQrooIegQINRAF) Example … what is an example of appropriate Something appropriate is correct and fits the situation. **A sweater-vest with reindeer on it is appropriate holiday apparel** , even if it\'s totally embarrassing. The adjective appropriate is used when something is suitable or fitting. \n### Appropriate - Definition, Meaning & Synonyms - Vocabulary.com\n\n vocabulary.com https://www.vocabulary.com › dictionary › appropriate vocabulary.com https://www.vocabulary.com › dictionary › appropriate [what is an example of appropriate](chrome-extension://jbckbhoeahlefbjdofonogamgbjjooca/search?sca_esv=23b8fd744dd7098e&sca_upv=1&sxsrf=ADLYWILHkJTxrhDEmWv9tz7lVIU3xcFlcA:1723592009993&q=what+is+an+example+of+appropriate&sa=X&ved=2ahUKEwjVyvjLkPOHAxXjGjQIHbezPEIQrooIegQINRAK) \n### APPROPRIATE \\| definition in the Cambridge English Dictionary\n\n Cambridge Dictionary https://dictionary.cambridge.org › dictionary › appropri... Cambridge Dictionary https://dictionary.cambridge.org › dictionary › appropri... *APPROPRIATE* meaning: 1. suitable or right for a particular situation or occasion: 2. to take something for your own…. Learn more. \n### 103 Synonyms & Antonyms for APPROPRIATE\n\n Thesaurus.com https://www.thesaurus.com › browse › appropriate Thesaurus.com https://www.thesaurus.com › browse › appropriate *appropriate* · adjective as in suitable. Compare Synonyms. Synonyms Antonyms. Strongest matches · verb as in set aside; allocate. Compare Synonyms. Synonyms \n### APPROPRIATE Definition & Meaning\n\n Dictionary.com https://www.dictionary.com › browse › appropriate Dictionary.com https://www.dictionary.com › browse › appropriate verb · to take for one\'s own use, esp illegally or without permission · to put aside (funds, etc) for a particular purpose or person. \n### Appropriate - Definition, Meaning & Synonyms\n\n Vocabulary.com https://www.vocabulary.com › dictionary › appropriate Vocabulary.com https://www.vocabulary.com › dictionary › appropriate Something *appropriate* is correct and fits the situation. A sweater-vest with reindeer on it is *appropriate* holiday apparel, even if it\'s totally embarrassing. \n### Appropriate Definition & Meaning \\| Britannica Dictionary\n\n Britannica https://www.britannica.com › dictionary › appropriate Britannica https://www.britannica.com › dictionary › appropriate to take or use (something) especially in a way that is illegal, unfair, etc. The economy has been weakened by corrupt officials who have appropriated the\xa0... \n### APPROPRIATE definition in American English\n\n Collins Dictionary https://www.collinsdictionary.com › dictionary › appro... Collins Dictionary https://www.collinsdictionary.com › dictionary › appro... *appropriate* in American English · 1. suitable or fitting for a particular purpose, person, occasion, etc · 2. 
belonging to or peculiar to a person; proper · 3. \n### APPROPRIATE Synonyms: 171 Similar and Opposite Words\n\n Merriam-Webster https://www.merriam-webster.com › thesaurus › approp... Merriam-Webster https://www.merriam-webster.com › thesaurus › approp... 7 days ago — Some common synonyms of *appropriate* are apt, felicitous, fitting, fit, happy, meet, proper, and suitable. While all these words mean "right with\xa0... \n### Sarah Paulson in APPROPRIATE — Broadway\'s Best ...\n\n appropriateplay.com https://appropriateplay.com appropriateplay.com https://appropriateplay.com Emmy and Golden Globe Award winner Sarah Paulson leads “one of the best casts on Broadway” (Deadline) in *APPROPRIATE* , a darkly comic American family drama by\xa0... [Images](chrome-extension://jbckbhoeahlefbjdofonogamgbjjooca/search?q=appropriate&sca_esv=23b8fd744dd7098e&sca_upv=1&sxsrf=ADLYWILHkJTxrhDEmWv9tz7lVIU3xcFlcA:1723592009993&udm=2&source=univ&ictx=2&sa=X&ved=2ahUKEwjVyvjLkPOHAxXjGjQIHbezPEIQw_oBegQIbhAC) [More images](chrome-extension://jbckbhoeahlefbjdofonogamgbjjooca/search?sca_esv=23b8fd744dd7098e&sca_upv=1&sxsrf=ADLYWILHkJTxrhDEmWv9tz7lVIU3xcFlcA:1723592009993&q=appropriate&udm=2&source=univ&fir=apwyFFOVgTuLJM%252CLhwbNG9CLuV1jM%252C_%253BTVg7HS9s-lGieM%252Colu_MW8EF29kAM%252C_%253BgiAA2v83KDFU_M%252CDxGLsghzY6nueM%252C_%253BEj_unyiwVeaA9M%252CNE8LVFotqBrMLM%252C_%253B6F89cr8jeo4RgM%252CKRuhnQq9Ic6i9M%252C_&usg=AI4_-kQC3yxrw4ZdD2ka0xIxismmz9Nf5g&sa=X&ved=2ahUKEwjVyvjLkPOHAxXjGjQIHbezPEIQiR56BAhqEAM) \n### appropriate - Wiktionary, the free dictionary\n\n Wiktionary https://en.wiktionary.org › wiki › appropriate Wiktionary https://en.wiktionary.org › wiki › appropriate (of an action or thing) Morally good; positive. Rescuing animals is an *appropriate* thing to do. Synonyms. 
People also search for [](chrome-extension://jbckbhoeahlefbjdofonogamgbjjooca/search?sca_esv=23b8fd744dd7098e&sca_upv=1&sxsrf=ADLYWILHkJTxrhDEmWv9tz7lVIU3xcFlcA:1723592009993&q=Appropriate&stick=H4sIAAAAAAAAAOMwVGI0-MXIsIGF4RULPxevfrq-oWFJcrJpYXqJOUKkrLK4sNw41_gVCw8Xl36uvoFReVm6WR6ca1YC5Jq8YuHm4gRxjUsqC7KRdOeUVGWUm1QgRHKSc-JzDFMKESJFZWVVBZZJFXAzDMvTzCsR8smmFeUlFgYli1i5HQsKivILijITS1JvsUkydD_tMQ6MDyl4p9J8UOrQniVPTX6um13mvQoAY0F1qdsAAAA&sa=X&ved=2ahUKEwjVyvjLkPOHAxXjGjQIHbezPEIQs9oBKAB6BAg9EAk) Appropriate [](chrome-extension://jbckbhoeahlefbjdofonogamgbjjooca/search?sca_esv=23b8fd744dd7098e&sca_upv=1&sxsrf=ADLYWILHkJTxrhDEmWv9tz7lVIU3xcFlcA:1723592009993&q=Stereophonic&stick=H4sIAAAAAAAAAOMwVGI0_MXIsIGF4RULPxevfrq-oWFJcrJpYXqJOUKkrLK4sNw41_gVCw8Xl36uvoFReVm6WR6ca1YC5Jq8YuHm4gRxjUsqC7KRdOeUVGWUm1QgRHKSc-JzDFMKESJFZWVVBZZJFXAzDMvTzCsR8smmFeUlFgYli1h5gktSi1LzCzLy8zKTb7FJMnQ_7TEOjA8peKfSfFDq0J4lT01-rptd5r0KAKUPMBbcAAAA&sa=X&ved=2ahUKEwjVyvjLkPOHAxXjGjQIHbezPEIQs9oBKAB6BAg9EA4) Stereophonic [](chrome-extension://jbckbhoeahlefbjdofonogamgbjjooca/search?sca_esv=23b8fd744dd7098e&sca_upv=1&sxsrf=ADLYWILHkJTxrhDEmWv9tz7lVIU3xcFlcA:1723592009993&q=Merrily+We+Roll+Along&stick=H4sIAAAAAAAAAOMwVGI0-sXIsIGF4RULPxevfrq-oWFJcrJpYXqJOUKkrLK4sNw41_gVCw8Xl36uvoFReVm6WR6ca1YC5Jq8YuHm4gRxjUsqC7KRdOeUVGWUm1QgRHKSc-JzDFMKESJFZWVVBZZJFXAzDMvTzCsR8smmFeUlFgYli1hFfVOLijJzKhXCUxWC8nNyFBxz8vPSb7FJMnQ_7TEOjA8peKfSfFDq0J4lT01-rptd5r0KAI66z77lAAAA&sa=X&ved=2ahUKEwjVyvjLkPOHAxXjGjQIHbezPEIQs9oBKAB6BAg9EBM) Merrily We Roll Along [](chrome-extension://jbckbhoeahlefbjdofonogamgbjjooca/search?sca_esv=23b8fd744dd7098e&sca_upv=1&sxsrf=ADLYWILHkJTxrhDEmWv9tz7lVIU3xcFlcA:1723592009993&q=Purlie+Victorious&stick=H4sIAAAAAAAAAOMwVGI0_sXIsIGF4RULPxevfrq-oWFJcrJpYXqJOUKkrLK4sNw41_gVCw8Xl36uvoFReVm6WR6ca1YC5Jq8YuHm4gRxjUsqC7KRdOeUVGWUm1QgRHKSc-JzDFMKESJFZWVVBZZJFXAzDMvTzCsR8smmFeUlFgYli1gFA0qLcjJTFcIyk0vyizLzS4tvsUkydD_tMQ6MDyl4p9J8UOrQniVPTX6um13mvQoANgKNQuEAAAA&sa=X&ved=2ahUKEwjVyvjLkPOHAxXjGjQIHbezPEIQs9oBKAB6BAg9EBg) Purlie Victorious [](chrome-extension://jbckbhoeahlefbjdofonogamgbjjooca/search?sca_esv=23b8fd744dd7098e&sca_upv=1&sxsrf=ADLYWILHkJTxrhDEmWv9tz7lVIU3xcFlcA:1723592009993&q=Spamalot&stick=H4sIAAAAAAAAAOMwVGI0-cXIsIGF4RULPxevfrq-oWFJcrJpYXqJOUKkrLK4sNw41_gVCw8Xl36uvoFReVm6WR6ca1YC5Jq8YuHm4gRxjUsqC7KRdOeUVGWUm1QgRHKSc-JzDFMKESJFZWVVBZZJFXAzDMvTzCsR8smmFeUlFgYli1g5ggsScxNz8ktusUkydD_tMQ6MDyl4p9J8UOrQniVPTX6um13mvQoArkcGZdgAAAA&sa=X&ved=2ahUKEwjVyvjLkPOHAxXjGjQIHbezPEIQs9oBKAB6BAg9EB0) Spamalot [](chrome-extension://jbckbhoeahlefbjdofonogamgbjjooca/search?sca_esv=23b8fd744dd7098e&sca_upv=1&sxsrf=ADLYWILHkJTxrhDEmWv9tz7lVIU3xcFlcA:1723592009993&q=Hell%27s+Kitchen&stick=H4sIAAAAAAAAAOMwVGI0_cXIsIGF4RULPxevfrq-oWFJcrJpYXqJOUKkrLK4sNw41_gVCw8Xl36uvoFReVm6WR6ca1YC5Jq8YuHm4gRxjUsqC7KRdOeUVGWUm1QgRHKSc-JzDFMKESJFZWVVBZZJFXAzDMvTzCsR8smmFeUlFgYli1j5PFJzctSLFbwzS5IzUvNusUkydD_tMQ6MDyl4p9J8UOrQniVPTX6um13mvQoAbwUq5d4AAAA&sa=X&ved=2ahUKEwjVyvjLkPOHAxXjGjQIHbezPEIQs9oBKAB6BAg9ECI) Hell\'s Kitchen Feedback Appropriate **meaning in Hindi** Appropriate **Broadway** Appropriate **synonyms** Appropriate **meaning** Appropriate **play** Appropriate **words** Appropriate **examples** Appropriate **in a sentence**\n# Main Results\n\n## Loading\n\nSomething went wrong. TRY AGAIN\n# Main Results\n\n## Loading\n\nSomething went wrong. 
TRY AGAIN\n# Page Navigation\n\n| | 1 | [2](chrome-extension://jbckbhoeahlefbjdofonogamgbjjooca/search?q=appropriate&sca_esv=23b8fd744dd7098e&sca_upv=1&sxsrf=ADLYWILHkJTxrhDEmWv9tz7lVIU3xcFlcA:1723592009993&ei=Se27ZpWCPOO10PEPt-fykQQ&start=10&sa=N&sstk=AagrsugFx8x6xzA6s-rrMdvrWMNW8Zo-gozUX9M7mhT2Q2IdXKusUGSA00WnkO2zeUtFS0GtE8aU5WqfKh-fHlHj0D5h5iIKWzEChw&ved=2ahUKEwjVyvjLkPOHAxXjGjQIHbezPEIQ8tMDegQIBBAE) | [3](chrome-extension://jbckbhoeahlefbjdofonogamgbjjooca/search?q=appropriate&sca_esv=23b8fd744dd7098e&sca_upv=1&sxsrf=ADLYWILHkJTxrhDEmWv9tz7lVIU3xcFlcA:1723592009993&ei=Se27ZpWCPOO10PEPt-fykQQ&start=20&sa=N&sstk=AagrsugFx8x6xzA6s-rrMdvrWMNW8Zo-gozUX9M7mhT2Q2IdXKusUGSA00WnkO2zeUtFS0GtE8aU5WqfKh-fHlHj0D5h5iIKWzEChw&ved=2ahUKEwjVyvjLkPOHAxXjGjQIHbezPEIQ8tMDegQIBBAG) | [4](chrome-extension://jbckbhoeahlefbjdofonogamgbjjooca/search?q=appropriate&sca_esv=23b8fd744dd7098e&sca_upv=1&sxsrf=ADLYWILHkJTxrhDEmWv9tz7lVIU3xcFlcA:1723592009993&ei=Se27ZpWCPOO10PEPt-fykQQ&start=30&sa=N&sstk=AagrsugFx8x6xzA6s-rrMdvrWMNW8Zo-gozUX9M7mhT2Q2IdXKusUGSA00WnkO2zeUtFS0GtE8aU5WqfKh-fHlHj0D5h5iIKWzEChw&ved=2ahUKEwjVyvjLkPOHAxXjGjQIHbezPEIQ8tMDegQIBBAI) | [5](chrome-extension://jbckbhoeahlefbjdofonogamgbjjooca/search?q=appropriate&sca_esv=23b8fd744dd7098e&sca_upv=1&sxsrf=ADLYWILHkJTxrhDEmWv9tz7lVIU3xcFlcA:1723592009993&ei=Se27ZpWCPOO10PEPt-fykQQ&start=40&sa=N&sstk=AagrsugFx8x6xzA6s-rrMdvrWMNW8Zo-gozUX9M7mhT2Q2IdXKusUGSA00WnkO2zeUtFS0GtE8aU5WqfKh-fHlHj0D5h5iIKWzEChw&ved=2ahUKEwjVyvjLkPOHAxXjGjQIHbezPEIQ8tMDegQIBBAK) | [6](chrome-extension://jbckbhoeahlefbjdofonogamgbjjooca/search?q=appropriate&sca_esv=23b8fd744dd7098e&sca_upv=1&sxsrf=ADLYWILHkJTxrhDEmWv9tz7lVIU3xcFlcA:1723592009993&ei=Se27ZpWCPOO10PEPt-fykQQ&start=50&sa=N&sstk=AagrsugFx8x6xzA6s-rrMdvrWMNW8Zo-gozUX9M7mhT2Q2IdXKusUGSA00WnkO2zeUtFS0GtE8aU5WqfKh-fHlHj0D5h5iIKWzEChw&ved=2ahUKEwjVyvjLkPOHAxXjGjQIHbezPEIQ8tMDegQIBBAM) | [7](chrome-extension://jbckbhoeahlefbjdofonogamgbjjooca/search?q=appropriate&sca_esv=23b8fd744dd7098e&sca_upv=1&sxsrf=ADLYWILHkJTxrhDEmWv9tz7lVIU3xcFlcA:1723592009993&ei=Se27ZpWCPOO10PEPt-fykQQ&start=60&sa=N&sstk=AagrsugFx8x6xzA6s-rrMdvrWMNW8Zo-gozUX9M7mhT2Q2IdXKusUGSA00WnkO2zeUtFS0GtE8aU5WqfKh-fHlHj0D5h5iIKWzEChw&ved=2ahUKEwjVyvjLkPOHAxXjGjQIHbezPEIQ8tMDegQIBBAO) | [8](chrome-extension://jbckbhoeahlefbjdofonogamgbjjooca/search?q=appropriate&sca_esv=23b8fd744dd7098e&sca_upv=1&sxsrf=ADLYWILHkJTxrhDEmWv9tz7lVIU3xcFlcA:1723592009993&ei=Se27ZpWCPOO10PEPt-fykQQ&start=70&sa=N&sstk=AagrsugFx8x6xzA6s-rrMdvrWMNW8Zo-gozUX9M7mhT2Q2IdXKusUGSA00WnkO2zeUtFS0GtE8aU5WqfKh-fHlHj0D5h5iIKWzEChw&ved=2ahUKEwjVyvjLkPOHAxXjGjQIHbezPEIQ8tMDegQIBBAQ) | [9](chrome-extension://jbckbhoeahlefbjdofonogamgbjjooca/search?q=appropriate&sca_esv=23b8fd744dd7098e&sca_upv=1&sxsrf=ADLYWILHkJTxrhDEmWv9tz7lVIU3xcFlcA:1723592009993&ei=Se27ZpWCPOO10PEPt-fykQQ&start=80&sa=N&sstk=AagrsugFx8x6xzA6s-rrMdvrWMNW8Zo-gozUX9M7mhT2Q2IdXKusUGSA00WnkO2zeUtFS0GtE8aU5WqfKh-fHlHj0D5h5iIKWzEChw&ved=2ahUKEwjVyvjLkPOHAxXjGjQIHbezPEIQ8tMDegQIBBAS) | [10](chrome-extension://jbckbhoeahlefbjdofonogamgbjjooca/search?q=appropriate&sca_esv=23b8fd744dd7098e&sca_upv=1&sxsrf=ADLYWILHkJTxrhDEmWv9tz7lVIU3xcFlcA:1723592009993&ei=Se27ZpWCPOO10PEPt-fykQQ&start=90&sa=N&sstk=AagrsugFx8x6xzA6s-rrMdvrWMNW8Zo-gozUX9M7mhT2Q2IdXKusUGSA00WnkO2zeUtFS0GtE8aU5WqfKh-fHlHj0D5h5iIKWzEChw&ved=2ahUKEwjVyvjLkPOHAxXjGjQIHbezPEIQ8tMDegQIBBAU) | 
[Next](chrome-extension://jbckbhoeahlefbjdofonogamgbjjooca/search?q=appropriate&sca_esv=23b8fd744dd7098e&sca_upv=1&sxsrf=ADLYWILHkJTxrhDEmWv9tz7lVIU3xcFlcA:1723592009993&ei=Se27ZpWCPOO10PEPt-fykQQ&start=10&sa=N&sstk=AagrsugFx8x6xzA6s-rrMdvrWMNW8Zo-gozUX9M7mhT2Q2IdXKusUGSA00WnkO2zeUtFS0GtE8aU5WqfKh-fHlHj0D5h5iIKWzEChw&ved=2ahUKEwjVyvjLkPOHAxXjGjQIHbezPEIQ8NMDegQIBBAW) |\n\n## Complementary Results\n\nTranslate to Choose language Afrikaans Akan Albanian Amharic Arabic Armenian Assamese Aymara Azerbaijani Bambara Bangla Basque Belarusian Bhojpuri Bosnian Bulgarian Burmese Catalan Cebuano Central Kurdish Chinese (Simplified) Chinese (Traditional) Corsican Croatian Czech Danish Divehi Dogri Dutch Esperanto Estonian Ewe Filipino Finnish French Galician Ganda Georgian German Goan Konkani Greek Guarani Gujarati Haitian Creole Hausa Hawaiian Hebrew Hindi Hmong Hungarian Icelandic Igbo Iloko Indonesian Irish Italian Japanese Javanese Kannada Kazakh Khmer Kinyarwanda Korean Krio Kurdish Kyrgyz Lao Latin Latvian Lingala Lithuanian Luxembourgish Macedonian Maithili Malagasy Malay Malayalam Maltese Manipuri (Meitei Mayek) Māori Marathi Mizo Mongolian Nepali Northern Sotho Norwegian Nyanja Odia Oromo Pashto Persian Polish Portuguese Punjabi Quechua Romanian Russian Samoan Sanskrit Scottish Gaelic Serbian Shona Sindhi Sinhala Slovak Slovenian Somali Southern Sotho Spanish Sundanese Swahili Swedish Tajik Tamil Tatar Telugu Thai Tigrinya Tsonga Turkish Turkmen Ukrainian Urdu Uyghur Uzbek Vietnamese Welsh Western Frisian Xhosa Yiddish Yoruba Zulu See translations in 100+ languages\n# Footer Links\n\n Downtown San Jose, San Jose, CA - Based on your past activity - Update location Can\'t update your location Learn more Updating location...'), Document(metadata={'BrowsingSessionId': '48573766', 'VisitedWebPageURL': 'https://myanimelist.net/', 'VisitedWebPageTitle': 'MyAnimeList.net - Anime and Manga Database and Community', 'VisitedWebPageDateWithTimeInISOString': '2024-08-13T16:49:24-07:00', 'VisitedWebPageReffererURL': 'https://www.google.com/search?q=appropiate&oq=appropiate&gs_lcrp=EgZjaHJvbWUyBggAEEUYOTIMCAEQABgKGLEDGIAEMgkIAhAAGAoYgAQyDAgDEAAYChixAxiABDIMCAQQABgKGLEDGIAEMgwIBRAAGAoYsQMYgAQyCQgGEAAYChiABDIPCAcQABgKGIMBGLEDGIAEMgwICBAAGAoYsQMYgAQyDwgJEC4YChiDARixAxiABNIBBzU0MGowajeoAgCwAgA&sourceid=chrome&ie=UTF-8', 'VisitedWebPageVisitDurationInMilliseconds': 201318, 'VisitedWebPageContent': None}, page_content='\n# Welcome to MyAnimeList.net\\!\n\n\n\n [Visit MALxJapan](https://mxj.myanimelist.net/) MALxJapan -More than just anime-  Learn how to draw anime & manga from Japanese pros🎨 .png) Your guide to 2024\'s Must-Read Manga is here 📖 .png) Join the【OSHI NO KO】Official MAL Club 🌟\n\n [View More](https://myanimelist.net/anime/season)\n## Summer 2024 Anime\n\n- \n### "Oshi no Ko" 2nd Season\n\n\n- \n### Tokidoki Bosotto Russia-go de Dereru Tonari no Alya-san\n\n\n- \n### Kami no Tou: Ouji no Kikan\n\n\n- \n### Shikanoko Nokonoko Koshitantan\n\n\n- \n### Fairy Tail: 100-nen Quest\n\n\n- \n### Tsue to Tsurugi no Wistoria\n\n\n- \n### Isekai Suicide Squad\n\n\n- \n### Gimai Seikatsu\n\n\n- \n### Isekai Shikkaku\n\n\n- \n### Giji Harem\n\n\n- \n### Make Heroine ga Oosugiru\\!\n\n\n- \n### Hazurewaku no "Joutai Ijou Skill" de Saikyou ni Natta Ore ga Subete wo Juurin suru made\n\n\n- \n### Nige Jouzu no Wakagimi\n\n\n- \n### 2.5-jigen no Ririsa\n\n\n- \n### Ore wa Subete wo "Parry" suru: Gyaku Kanchigai no Sekai Saikyou wa Boukensha ni Naritai\n\n\n- \n### Kimi to Boku no Saigo no Senjou, Aruiwa Sekai ga 
Hajimaru Seisen Season II\n\n\n- \n### Maougun Saikyou no Majutsushi wa Ningen datta\n\n\n- \n### Shoushimin Series\n\n\n- \n### Shinmai Ossan Boukensha, Saikyou Party ni Shinu hodo Kitaerarete Muteki ni Naru.\n\n\n- \n### NieR:Automata Ver1.1a Part 2\n\n\n\n\n\n [View More](https://myanimelist.net/store?_location=mal_mid_slider) Manga Store\n-  [Slasher Maidens](https://myanimelist.net/store/manga/1013/Slasher_Maidens?_location=mal_mid_slider)\n-  [Peach Girl NEXT](https://myanimelist.net/store/manga/479/Peach_Girl_NEXT?_location=mal_mid_slider)\n-  [THE FOX\'S KISS](https://myanimelist.net/store/manga/603/THE_FOXS_KISS?_location=mal_mid_slider)\n-  [Kagerou Daze (light novel)](https://myanimelist.net/store/manga/868/Kagerou_Daze_light_novel?_location=mal_mid_slider)\n-  [The Devil Is a Part-Timer! (light novel)](https://myanimelist.net/store/manga/905/The_Devil_Is_a_Part-Timer_light_novel?_location=mal_mid_slider)\n-  [Woof Woof Story: I Told You to Turn Me Into a Pampered Pooch, Not Fenrir!](https://myanimelist.net/store/manga/646/Woof_Woof_Story__I_Told_You_to_Turn_Me_Into_a_Pampered_Pooch_Not_Fenrir?_location=mal_mid_slider)\n-  [Life Lessons with Uramichi Oniisan](https://myanimelist.net/store/manga/981/Life_Lessons_with_Uramichi_Oniisan?_location=mal_mid_slider)\n-  [The Garden of Words](https://myanimelist.net/store/manga/93/The_Garden_of_Words?_location=mal_mid_slider)\n-  [Police in a Pod](https://myanimelist.net/store/manga/1061/Police_in_a_Pod?_location=mal_mid_slider)\n-  [Baccano!](https://myanimelist.net/store/manga/250/Baccano?_location=mal_mid_slider)\n-  [We\'re On Our Own From Here](https://myanimelist.net/store/manga/1086/Were_On_Our_Own_From_Here?_location=mal_mid_slider)\n-  [The Demon Sword Master of Excalibur Academy (light novel)](https://myanimelist.net/store/manga/953/The_Demon_Sword_Master_of_Excalibur_Academy_light_novel?_location=mal_mid_slider)\n-  [A Sign of Affection](https://myanimelist.net/store/manga/778/A_Sign_of_Affection?_location=mal_mid_slider)\n-  [The Seven Deadly Sins: Four Knights of the Apocalypse](https://myanimelist.net/store/manga/1125/The_Seven_Deadly_Sins__Four_Knights_of_the_Apocalypse?_location=mal_mid_slider)\n-  [Éclair](https://myanimelist.net/store/manga/623/%C3%89clair?_location=mal_mid_slider)\n\n\n\n [View More](https://myanimelist.net/watch/episode)\n## Latest Updated Episode Videos\n\n- [Episode 28](https://myanimelist.net/anime/52991/Sousou_no_Frieren/episode/28) [Episode 27](https://myanimelist.net/anime/52991/Sousou_no_Frieren/episode/27)\n### Sousou no Frieren\n- [Episode 6](https://myanimelist.net/anime/57099/Na_Nare_Hana_Nare/episode/6) [Episode 5](https://myanimelist.net/anime/57099/Na_Nare_Hana_Nare/episode/5)\n### Na Nare Hana Nare\n- [Episode 1](https://myanimelist.net/anime/56179/Delicos_Nursery/episode/1)\n### Delico\'s Nursery\n- [Episode 1](https://myanimelist.net/anime/57192/Yeosin_Gangnim/episode/1)\n### Yeosin Gangnim\n- [Episode 6](https://myanimelist.net/anime/58426/Shikanoko_Nokonoko_Koshitantan/episode/6) [Episode 5](https://myanimelist.net/anime/58426/Shikanoko_Nokonoko_Koshitantan/episode/5)\n### Shikanoko Nokonoko Koshitantan\n- [Episode 25](https://myanimelist.net/anime/52347/Shangri-La_Frontier__Kusoge_Hunter_Kamige_ni_Idoman_to_su/episode/25) [Episode 24](https://myanimelist.net/anime/52347/Shangri-La_Frontier__Kusoge_Hunter_Kamige_ni_Idoman_to_su/episode/24)\n### Shangri-La Frontier: Kusoge Hunter, Kamige ni Idoman to su\n- [Episode 
3](https://myanimelist.net/anime/56062/Naze_Boku_no_Sekai_wo_Daremo_Oboeteinai_no_ka/episode/3) [Episode 2](https://myanimelist.net/anime/56062/Naze_Boku_no_Sekai_wo_Daremo_Oboeteinai_no_ka/episode/2)\n### Naze Boku no Sekai wo Daremo Oboeteinai no ka?\n- [Episode 4](https://myanimelist.net/anime/53626/Bye_Bye_Earth/episode/4) [Episode 3](https://myanimelist.net/anime/53626/Bye_Bye_Earth/episode/3)\n### Bye Bye, Earth\n- [Episode 48](https://myanimelist.net/anime/859/Digimon_Savers/episode/48) [Episode 47](https://myanimelist.net/anime/859/Digimon_Savers/episode/47)\n### Digimon Savers\n- [Episode 12](https://myanimelist.net/anime/53439/Boushoku_no_Berserk/episode/12) [Episode 11](https://myanimelist.net/anime/53439/Boushoku_no_Berserk/episode/11)\n### Boushoku no Berserk\n- [Episode 4](https://myanimelist.net/anime/52367/Isekai_Shikkaku/episode/4) [Episode 3](https://myanimelist.net/anime/52367/Isekai_Shikkaku/episode/3)\n### Isekai Shikkaku\n- [Episode 3](https://myanimelist.net/anime/58059/Tsue_to_Tsurugi_no_Wistoria/episode/3) [Episode 2](https://myanimelist.net/anime/58059/Tsue_to_Tsurugi_no_Wistoria/episode/2)\n### Tsue to Tsurugi no Wistoria\n- [Episode 4](https://myanimelist.net/anime/52635/Kami_no_Tou__Ouji_no_Kikan/episode/4) [Episode 3](https://myanimelist.net/anime/52635/Kami_no_Tou__Ouji_no_Kikan/episode/3)\n### Kami no Tou: Ouji no Kikan\n- [Episode 3](https://myanimelist.net/anime/57524/Make_Heroine_ga_Oosugiru/episode/3) [Episode 2](https://myanimelist.net/anime/57524/Make_Heroine_ga_Oosugiru/episode/2)\n### Make Heroine ga Oosugiru\\!\n- [Episode 4](https://myanimelist.net/anime/54724/Nige_Jouzu_no_Wakagimi/episode/4) [Episode 3](https://myanimelist.net/anime/54724/Nige_Jouzu_no_Wakagimi/episode/3)\n### Nige Jouzu no Wakagimi\n- [Episode 11](https://myanimelist.net/anime/54789/Boku_no_Hero_Academia_7th_Season/episode/11) [Episode 10](https://myanimelist.net/anime/54789/Boku_no_Hero_Academia_7th_Season/episode/10)\n### Boku no Hero Academia 7th Season\n- [Episode 4](https://myanimelist.net/anime/58357/Tensui_no_Sakuna-hime/episode/4) [Episode 3](https://myanimelist.net/anime/58357/Tensui_no_Sakuna-hime/episode/3)\n### Tensui no Sakuna-hime\n- [Episode 5](https://myanimelist.net/anime/58272/Boku_no_Tsuma_wa_Kanjou_ga_Nai/episode/5) [Episode 4](https://myanimelist.net/anime/58272/Boku_no_Tsuma_wa_Kanjou_ga_Nai/episode/4)\n### Boku no Tsuma wa Kanjou ga Nai\n- [Episode 2](https://myanimelist.net/anime/50855/Yamato_yo_Towa_ni__Rebel_3199/episode/2) [Episode 1](https://myanimelist.net/anime/50855/Yamato_yo_Towa_ni__Rebel_3199/episode/1)\n### Yamato yo, Towa ni: Rebel 3199\n- [Episode 4](https://myanimelist.net/anime/56063/NieR_Automata_Ver11a_Part_2/episode/4) [Episode 3](https://myanimelist.net/anime/56063/NieR_Automata_Ver11a_Part_2/episode/3)\n### NieR:Automata Ver1.1a Part 2\n- [Episode 12](https://myanimelist.net/anime/53407/Bartender__Kami_no_Glass/episode/12) [Episode 11](https://myanimelist.net/anime/53407/Bartender__Kami_no_Glass/episode/11)\n### Bartender: Kami no Glass\n- [Episode 4](https://myanimelist.net/anime/54968/Giji_Harem/episode/4) [Episode 3](https://myanimelist.net/anime/54968/Giji_Harem/episode/3)\n### Giji Harem\n- [Episode 4](https://myanimelist.net/anime/52481/Gimai_Seikatsu/episode/4) [Episode 3](https://myanimelist.net/anime/52481/Gimai_Seikatsu/episode/3)\n### Gimai Seikatsu\n- [Episode 4](https://myanimelist.net/anime/57325/Ramen_Akaneko/episode/4) [Episode 3](https://myanimelist.net/anime/57325/Ramen_Akaneko/episode/3)\n### Ramen Akaneko\n- 
[Episode 15](https://myanimelist.net/anime/50559/Megaton-kyuu_Musashi_2nd_Season/episode/15) [Episode 14](https://myanimelist.net/anime/50559/Megaton-kyuu_Musashi_2nd_Season/episode/14)\n### Megaton-kyuu Musashi 2nd Season\n- [Episode 5](https://myanimelist.net/anime/57876/Maougun_Saikyou_no_Majutsushi_wa_Ningen_datta/episode/5) [Episode 4](https://myanimelist.net/anime/57876/Maougun_Saikyou_no_Majutsushi_wa_Ningen_datta/episode/4)\n### Maougun Saikyou no Majutsushi wa Ningen datta\n- [Episode 12](https://myanimelist.net/anime/51648/Nozomanu_Fushi_no_Boukensha/episode/12) [Episode 11](https://myanimelist.net/anime/51648/Nozomanu_Fushi_no_Boukensha/episode/11)\n### Nozomanu Fushi no Boukensha\n- [Episode 1](https://myanimelist.net/anime/49981/Kimi_to_Boku_no_Saigo_no_Senjou_Aruiwa_Sekai_ga_Hajimaru_Seisen_Season_II/episode/1)\n### Kimi to Boku no Saigo no Senjou, Aruiwa Sekai ga Hajimaru Seisen Season II\n- [Episode 4](https://myanimelist.net/anime/54744/Tokidoki_Bosotto_Russia-go_de_Dereru_Tonari_no_Alya-san/episode/4) [Episode 3](https://myanimelist.net/anime/54744/Tokidoki_Bosotto_Russia-go_de_Dereru_Tonari_no_Alya-san/episode/3)\n### Tokidoki Bosotto Russia-go de Dereru Tonari no Alya-san\n- [Episode 12](https://myanimelist.net/anime/50204/Tokyo_24-ku/episode/12) [Episode 11](https://myanimelist.net/anime/50204/Tokyo_24-ku/episode/11)\n### Tokyo 24-ku\n\n\n\n\n\n [View More](https://myanimelist.net/watch/promotion/popular)\n## Most Popular Anime Trailers\n\n- PV 1 play\n### Shingeki no Kyojin\n- PV play\n### Death Note\n- Announcement play\n### Fullmetal Alchemist: Brotherhood\n- PV 1 play\n### One Punch Man\n- PV English dub version play\n### Sword Art Online\n- Announcement play\n### Kimetsu no Yaiba\n- PV 1 play\n### Boku no Hero Academia\n- PV 1 play\n### Hunter x Hunter (2011)\n- PV Madman ver. play\n### Tokyo Ghoul\n- Announcement 1 play\n### Kimi no Na wa.\n- PV 1 play\n### Shingeki no Kyojin Season 2\n- Announcement play\n### Jujutsu Kaisen\n- PV March 2011 version play\n### Steins;Gate\n- Announcement PV play\n### Boku no Hero Academia 2nd Season\n- PV Viz Media version play\n### Naruto: Shippuuden\n- PV 1 play\n### No Game No Life\n- Announcement play\n### Shingeki no Kyojin Season 3\n- PV Madman 32, English dub version play\n### One Piece\n- Announcement play\n### Koe no Katachi\n- Announcement CM play\n### Shingeki no Kyojin Season 3 Part 2\n- Trailer 1 play\n### Shigatsu wa Kimi no Uso\n- PV English dub version play\n### Toradora\\!\n- PV 1 play\n### Re:Zero kara Hajimeru Isekai Seikatsu\n- Announcement play\n### Boku no Hero Academia 3rd Season\n- PV 1 play\n### Noragami\n- CM 3 play\n### Boku dake ga Inai Machi\n- PV 2 play\n### Akame ga Kill\\!\n- Teaser PV play\n### Mob Psycho 100\n- PV play\n### Angel Beats\\!\n- Netflix Trailer 2015 play\n### Nanatsu no Taizai\n\n\n\n [View More](https://myanimelist.net/news)\n## Anime & Manga News\n\n \n### Focus: Dive into the World of Webtoons with \'Omniscient Reader\'s Viewpoint\'\n\nOmniscient Reader\'s Viewpoint, written by singNsong, is a beloved web novel that has captured the hearts of readers with its unique plot and diverse cast of characters. The story follows Kim Dokja, an ordinary office worker whose reality changes to... 
[read more](https://myanimelist.net/news/71537758)\n\nYesterday, 7:24 PM by [tingy](https://myanimelist.net/profile/tingy) | [Discuss (1 comment)](https://myanimelist.net/forum/?topicid=2173977)\n\n \n### \'Hamidashi Creative\' Reveals Main Cast, Additional Staff, Teaser Promo, Fall 2024 Premiere\n\nAn official website opened for the Hamidashi Creative television anime series on Monday and revealed the main cast, additional staff, main visual (pictured), and a teaser promotional video. The 12-episode short anime is scheduled to premiere on Octob... [read more](https://myanimelist.net/news/71537051)\n\nYesterday, 3:07 PM by [DatRandomDude](https://myanimelist.net/profile/DatRandomDude) | [Discuss (4 comments)](https://myanimelist.net/forum/?topicid=2173957)\n\n \n### \'Ookami to Koushinryou: Merchant Meets the Wise Wolf\' Announces Pair of Additional Cast\n\nThe official website for the Ookami to Koushinryou: Merchant Meets the Wise Wolf (Spice & Wolf: Merchant Meets the Wise Wolf) television anime revealed on Tuesday a pair of additional cast and a new key visual for the upcoming arc, titled Ikyоu no Ka... [read more](https://myanimelist.net/news/71536827)\n\nYesterday, 2:09 PM by [DatRandomDude](https://myanimelist.net/profile/DatRandomDude) | [Discuss (2 comments)](https://myanimelist.net/forum/?topicid=2173954)\n\n \n### Main Staff for \'The One Piece\' Anime Series Announced\n\nThe second day of the One Piece Day’24 event revealed the main staff for WIT Studio\'s The One Piece anime remake on Sunday. The new anime series will begin from the Higashi no Umi-hen (Eastern Sea Arc) and is set to be released on Netflix. Staff Dire... [read more](https://myanimelist.net/news/71528597)\n\nAug 10, 7:10 PM by [DatRandomDude](https://myanimelist.net/profile/DatRandomDude) | [Discuss (52 comments)](https://myanimelist.net/forum/?topicid=2173702)\n\n\n\n [View More](https://myanimelist.net/forum/)\n## Recent Anime Discussions\n\n### I was decieved.\n\nby [AnimePedestrian](https://myanimelist.net/profile/AnimePedestrian) (32 replies)\n\n### Do you think most Reverse Harems are better than Harems ?\n\nby [CoolitzHubertXVI](https://myanimelist.net/profile/CoolitzHubertXVI) (60 replies)\n\n### do \'no-kill policy\' MCs get on your nerves?\n\nby [ame](https://myanimelist.net/profile/ame) (56 replies)\n\n### Anyone watched the leaked Dandadan episodes?\n\nby [LowIqSalty](https://myanimelist.net/profile/LowIqSalty) (12 replies)\n\n\n\n [View More](https://myanimelist.net/featured) Featured Articles  [Which Wholesome Romance Novel Should Be Adapted to Manga?](https://myanimelist.net/featured/2390/Which_Wholesome_Romance_Novel_Should_Be_Adapted_to_Manga)\n\nHere are the 10 finalists from the MAL x Honeyfeed Writing Contest 2023! Try them out and let us know which web novels you want to see brought to life as published light novel or manga. by [MAL_editing_team](https://myanimelist.net/profile/MAL_editing_team)\n\n**43,666** views\n\n  [GUNMACHAN, please teach us about Gunma, Japan!](https://myanimelist.net/featured/2386/GUNMACHAN_please_teach_us_about_Gunma_Japan)\n\nDo you know which popular anime have been set in Gunma prefecture? Find out with us as we learn more about this prefecture with the adorable GUNMACHAN! 
by [Kineta](https://myanimelist.net/profile/Kineta)\n\n**32,093** views\n\n  [My Dress-Up Darling: An Interview with the Director, Part 1](https://myanimelist.net/featured/2384/My_Dress-Up_Darling__An_Interview_with_the_Director_Part_1)\n\n*My Dress-Up Darling* has sold over 8million copies, with a hit anime in Winter 2022 and a sequel in the way. It\'s about cosplay-loving Marin and Wakana, an aspiring doll craftsman. Fans were captivated by the slow-burning love and the realistic cosplay. We got the details from the director. by [Febri](https://myanimelist.net/profile/Febri)\n\n**80,778** views\n\nSpoiler  [What Made Wonder Egg Priority Such a Special Anime, Part 1](https://myanimelist.net/featured/2379/What_Made_Wonder_Egg_Priority_Such_a_Special_Anime_Part_1)\n\nWonder Egg Priority is the first anime series created by hit television drama screenwriter, Shinji Nojima. It\'s an unique show imbued with the passion of its young team of creators. In this special interview, we asked the core staff on the series what made Wonder Egg Priority so outstanding. by [Febri](https://myanimelist.net/profile/Febri)\n\n**97,661** views\n\nSpoiler\n\n [View More](https://myanimelist.net/reviews.php?t=anime)\n## Latest Anime Reviews\n\n  Overall Rating: 8\n### Chiyu Mahou no Machigatta Tsukaikata\n\n [add](https://myanimelist.net/ownlist/anime/add?selected_series_id=49613&hideLayout=1) The story follows the usual Isekai pattern, the protagonists are summoned to the other because they supposedly have extraordinary abilities and when they arrive, they undergo severe training, the difference in this story is that Usato has healing powers and also has to train both mentally and physically; soon Rose, the captain of the Rescue Group, appears,... [read more](chrome-extension://jbckbhoeahlefbjdofonogamgbjjooca/reviews.php?id=534097)\n\n56 minutes ago\n by [Fernando-Senpai](https://myanimelist.net/profile/Fernando-Senpai)  Overall Rating: 7\n### 5-toubun no Hanayome\n\n [add](https://myanimelist.net/ownlist/anime/add?selected_series_id=38101&hideLayout=1) This is the first review I have written. I\'m not a particular weeb who obsesses over minor details, but I generally give ratings compared to other shows I have watched and attempted to justify the numbers in this review. \n\nStory - 7.5/10 \nA nerdy kid Uesugi with a rough backstory takes on a tutoring job of 5 quintuplets so he can help repay family... [read more](chrome-extension://jbckbhoeahlefbjdofonogamgbjjooca/reviews.php?id=534094)\n\n2 hours ago\n by [CryptPrice](https://myanimelist.net/profile/CryptPrice)  Overall Rating: 4\n### Tasuuketsu\n\n [add](https://myanimelist.net/ownlist/anime/add?selected_series_id=57945&hideLayout=1) feels kinda like a bad "rip-off" of the most basic version of Alice in Borderland. It tries to be this series where you need to out smart people, like in NGNL, but it fails horribly, the rules of the game are explained, but I think pretty weirdly, maybe I was just to distracted by how boring it was explained, that I didn\'t quite grasped it. \nIts like... [read more](chrome-extension://jbckbhoeahlefbjdofonogamgbjjooca/reviews.php?id=534091)\n\nPreliminary 3 hours ago\n by [tsukito_ai](https://myanimelist.net/profile/tsukito_ai) Overall Rating: 7\n### Nana Toshi Monogatari: Hokkyokukai Sensen\n\n [add](https://myanimelist.net/ownlist/anime/add?selected_series_id=4617&hideLayout=1) Nana Toshi is a 2-episode science fiction based on the manga with the same name. 
\nIt offers a small glimpse into the manga featuring a short battle between two city nations.\nThe drawing style is mature, concise and the main selling point of this short story. \nAnimation is simple but clever and well executed. \nVoice acting and music are kept simple... [read more](chrome-extension://jbckbhoeahlefbjdofonogamgbjjooca/reviews.php?id=534090)\n\n3 hours ago\n by [Asbjoernson](https://myanimelist.net/profile/Asbjoernson)\n\n\n\n [View More](https://myanimelist.net/recommendations.php?s=recentrecs&t=anime)\n## Latest Anime Recommendations\n\n| If you liked\n### Watashi ga Motete Dousunda\n\n [add](https://myanimelist.net/ownlist/anime/add?selected_series_id=32899&hideLayout=1) | ...then you might like\n### Yeosin Gangnim\n\n [add](https://myanimelist.net/ownlist/anime/add?selected_series_id=57192&hideLayout=1) |\n\nA girl who wasn\'t attractive becomes beautiful and completely changes her reality. Anime rec by [CosmicGirl](https://myanimelist.net/profile/CosmicGirl) - 2 hours ago| If you liked\n### Gin no Saji\n\n [add](https://myanimelist.net/ownlist/anime/add?selected_series_id=16918&hideLayout=1) | ...then you might like\n### Grand Blue\n\n [add](https://myanimelist.net/ownlist/anime/add?selected_series_id=37105&hideLayout=1) |\n\nBoth have a confused mc and gradually shifting from hating to liking it . Both are funny . Grand blue can be edgy at some times but silver spoon has a perfect amount of comedy in it Anime rec by [ilikec0ws](https://myanimelist.net/profile/ilikec0ws) - 6 hours ago| If you liked\n### Corpse Party: Tortured Souls - Bougyakusareta Tamashii no Jukyou\n\n [add](https://myanimelist.net/ownlist/anime/add?selected_series_id=15037&hideLayout=1) | ...then you might like\n### Dark Gathering\n\n [add](https://myanimelist.net/ownlist/anime/add?selected_series_id=52505&hideLayout=1) |\n\nAlthough the story isn\'t as goo as either Dark Gathering or the game this is adapting, it is worth a watch if you want to watch some other bloody anime Anime rec by [Ga1_ahad](https://myanimelist.net/profile/Ga1_ahad) - 10 hours ago| If you liked\n### FLCL\n\n [add](https://myanimelist.net/ownlist/anime/add?selected_series_id=227&hideLayout=1) | ...then you might like\n### Poputepipikku\n\n [add](https://myanimelist.net/ownlist/anime/add?selected_series_id=35330&hideLayout=1) |\n\nIf you like Poputepipikku you\'ll definetly like FLCL, you\'ll find the same random antics, crazy spontaneous action and appealing visuals in FLCL that you did in Poputepipikku!!! 
Anime rec by [FunkyFelineZ](https://myanimelist.net/profile/FunkyFelineZ) - Yesterday, 7:50 PM\n\n\n\n **More**\n## Top Airing Anime\n\n- 1  \n\n [add](https://myanimelist.net/ownlist/anime/add?selected_series_id=57864&hideLayout=1&click_type=list-add-top)\n### Monogatari Series: Off & Monster Season\n\n ONA,\n 0 eps,\n scored 8.97 \n 57,322 members\n- 2  \n\n [add](https://myanimelist.net/ownlist/anime/add?selected_series_id=21&hideLayout=1&click_type=list-add-top)\n### One Piece\n\n TV,\n 0 eps,\n scored 8.72 \n 2,403,763 members\n- 3  \n\n [add](https://myanimelist.net/ownlist/anime/add?selected_series_id=55791&hideLayout=1&click_type=list-add-top)\n### "Oshi no Ko" 2nd Season\n\n TV,\n 13 eps,\n scored 8.48 \n 294,402 members\n- 4  \n\n [add](https://myanimelist.net/ownlist/anime/add?selected_series_id=51039&hideLayout=1&click_type=list-add-top)\n### Doupo Cangqiong: Nian Fan\n\n ONA,\n 157 eps,\n scored 8.39 \n 7,696 members\n- 5  \n\n [add](https://myanimelist.net/ownlist/anime/add?selected_series_id=57524&hideLayout=1&click_type=list-add-top)\n### Make Heroine ga Oosugiru\\!\n\n TV,\n 12 eps,\n scored 8.30 \n 99,327 members\n\n\n\n\n\n **More**\n## Top Upcoming Anime\n\n- 1  \n\n [add](https://myanimelist.net/ownlist/anime/add?selected_series_id=52807&hideLayout=1&click_type=list-add-top)\n### One Punch Man 3\n\n TV,\n 0 eps,\n scored N/A \n 236,114 members\n- 2  \n\n [add](https://myanimelist.net/ownlist/anime/add?selected_series_id=54857&hideLayout=1&click_type=list-add-top)\n### Re:Zero kara Hajimeru Isekai Seikatsu 3rd Season\n\n TV,\n 0 eps,\n scored N/A \n 175,403 members\n- 3  \n\n [add](https://myanimelist.net/ownlist/anime/add?selected_series_id=49233&hideLayout=1&click_type=list-add-top)\n### Youjo Senki II\n\n TV,\n 0 eps,\n scored N/A \n 152,129 members\n- 4  \n\n [add](https://myanimelist.net/ownlist/anime/add?selected_series_id=53065&hideLayout=1&click_type=list-add-top)\n### Sono Bisque Doll wa Koi wo Suru (Zoku-hen)\n\n Unknown,\n 0 eps,\n scored N/A \n 133,295 members\n- 5  \n\n [add](https://myanimelist.net/ownlist/anime/add?selected_series_id=40333&hideLayout=1&click_type=list-add-top)\n### Uzumaki\n\n TV,\n 4 eps,\n scored N/A \n 127,172 members\n\n\n\n\n\n **More**\n## Most Popular Anime\n\n- 1  \n\n [add](https://myanimelist.net/ownlist/anime/add?selected_series_id=16498&hideLayout=1&click_type=list-add-top)\n### Shingeki no Kyojin\n\n TV,\n 25 eps,\n scored 8.55 \n 4,003,692 members\n- 2  \n\n [add](https://myanimelist.net/ownlist/anime/add?selected_series_id=1535&hideLayout=1&click_type=list-add-top)\n### Death Note\n\n TV,\n 37 eps,\n scored 8.62 \n 3,957,227 members\n- 3  \n\n [add](https://myanimelist.net/ownlist/anime/add?selected_series_id=5114&hideLayout=1&click_type=list-add-top)\n### Fullmetal Alchemist: Brotherhood\n\n TV,\n 64 eps,\n scored 9.09 \n 3,391,662 members\n- 4  \n\n [add](https://myanimelist.net/ownlist/anime/add?selected_series_id=30276&hideLayout=1&click_type=list-add-top)\n### One Punch Man\n\n TV,\n 12 eps,\n scored 8.49 \n 3,264,009 members\n- 5  \n\n [add](https://myanimelist.net/ownlist/anime/add?selected_series_id=11757&hideLayout=1&click_type=list-add-top)\n### Sword Art Online\n\n TV,\n 25 eps,\n scored 7.21 \n 3,106,755 members\n- 6  \n\n [add](https://myanimelist.net/ownlist/anime/add?selected_series_id=38000&hideLayout=1&click_type=list-add-top)\n### Kimetsu no Yaiba\n\n TV,\n 26 eps,\n scored 8.46 \n 3,094,859 members\n- 7  \n\n 
[add](https://myanimelist.net/ownlist/anime/add?selected_series_id=31964&hideLayout=1&click_type=list-add-top)\n### Boku no Hero Academia\n\n TV,\n 13 eps,\n scored 7.86 \n 3,064,546 members\n- 8  \n\n [add](https://myanimelist.net/ownlist/anime/add?selected_series_id=20&hideLayout=1&click_type=list-add-top)\n### Naruto\n\n TV,\n 220 eps,\n scored 8.00 \n 2,890,321 members\n- 9  \n\n [add](https://myanimelist.net/ownlist/anime/add?selected_series_id=11061&hideLayout=1&click_type=list-add-top)\n### Hunter x Hunter (2011)\n\n TV,\n 148 eps,\n scored 9.03 \n 2,883,837 members\n- 10  \n\n [add](https://myanimelist.net/ownlist/anime/add?selected_series_id=22319&hideLayout=1&click_type=list-add-top)\n### Tokyo Ghoul\n\n TV,\n 12 eps,\n scored 7.79 \n 2,853,612 members\n\n'), Document(metadata={'BrowsingSessionId': '48573787', 'VisitedWebPageURL': 'https://www.reddit.com/', 'VisitedWebPageTitle': 'Reddit - Dive into anything', 'VisitedWebPageDateWithTimeInISOString': '2024-08-13T16:49:50-07:00', 'VisitedWebPageReffererURL': 'https://www.google.com/search?gs_ssp=eJzj4tLP1TdIqjBProxXYDRgdGDwYitKTUnJLAEAUiAGug&q=reddit&oq=redd&gs_lcrp=EgZjaHJvbWUqFQgBEC4YJxjHARjJAxjRAxiABBiKBTIGCAAQRRg8MhUIARAuGCcYxwEYyQMY0QMYgAQYigUyBggCEEUYOTIGCAMQRRg8MgYIBBBFGDwyBggFEEUYQTIGCAYQRRhBMgYIBxBFGEHSAQgxODE3ajBqN6gCALACAA&sourceid=chrome&ie=UTF-8', 'VisitedWebPageVisitDurationInMilliseconds': 1844, 'VisitedWebPageContent': None}, page_content=' Best Open sort options [Best](chrome-extension://jbckbhoeahlefbjdofonogamgbjjooca/best/?feed=home) [Hot](chrome-extension://jbckbhoeahlefbjdofonogamgbjjooca/hot/?feed=home) [New](chrome-extension://jbckbhoeahlefbjdofonogamgbjjooca/new/?feed=home) [Top](chrome-extension://jbckbhoeahlefbjdofonogamgbjjooca/top/?feed=home) [Rising](chrome-extension://jbckbhoeahlefbjdofonogamgbjjooca/rising/?feed=home) Change post view [Card](chrome-extension://jbckbhoeahlefbjdofonogamgbjjooca/popup.html?feed=home&feedViewType=cardView) [Compact](chrome-extension://jbckbhoeahlefbjdofonogamgbjjooca/popup.html?feed=home&feedViewType=compactView)\n\n [Top rated players Cologne 2024 Group Stage](chrome-extension://jbckbhoeahlefbjdofonogamgbjjooca/r/GlobalOffensive/comments/1erj5j9/top_rated_players_cologne_2024_group_stage/) [r/GlobalOffensive](chrome-extension://jbckbhoeahlefbjdofonogamgbjjooca/r/GlobalOffensive/) [r/GlobalOffensive](chrome-extension://jbckbhoeahlefbjdofonogamgbjjooca/r/GlobalOffensive/) r/GlobalOffensive is the home for the Counter-Strike community and a hub for the discussion and sharing of content relevant to Counter-Strike: Global Offensive (CS:GO), and Counter-Strike 2 (CS2).\n\nCounter-Strike enjoys a thriving esports scene and dedicated competitive playerbase, as well as a robust creative community. This is the largest and most active CS sub on Reddit.\n\n2.5M Members 856 Online ADMIN MOD •\n\n<-time->\n 3 hr. ago\n\n-time->\n [Top rated players Cologne 2024 Group Stage](chrome-extension://jbckbhoeahlefbjdofonogamgbjjooca/r/GlobalOffensive/comments/1erj5j9/top_rated_players_cologne_2024_group_stage/) [u/Official_Klaviyo](chrome-extension://jbckbhoeahlefbjdofonogamgbjjooca/user/Official_Klaviyo/) • Promoted Business owners and marketing nerds of Reddit! With Klaviyo AI, you can automate your SMS and email marketing funnels by just writing a few prompts. Type in who you want to reach and what you want them to know, and Klaviyo AI does the rest. 
Try it out today at the link below ⬇️ klaviyo.com Learn More\n\n [Chat Memory History in Production - Architectures and Methods](chrome-extension://jbckbhoeahlefbjdofonogamgbjjooca/r/LangChain/comments/1ergfbf/chat_memory_history_in_production_architectures/) [r/LangChain](chrome-extension://jbckbhoeahlefbjdofonogamgbjjooca/r/LangChain/) [r/LangChain](chrome-extension://jbckbhoeahlefbjdofonogamgbjjooca/r/LangChain/) LangChain is an open-source framework and developer toolkit that helps developers get LLM applications from prototype to production. \n\nIt is available for Python and Javascript at https://www.langchain.com/.\n\n26K Members 56 Online ADMIN MOD •\n\n<-time->\n 5 hr. ago\n\n-time->\n [Chat Memory History in Production - Architectures and Methods](chrome-extension://jbckbhoeahlefbjdofonogamgbjjooca/r/LangChain/comments/1ergfbf/chat_memory_history_in_production_architectures/) Hey guys, we\'re currently working on 2 applications in our company. In this first stage, both of them will be running "offline", meaning that it won\'t be any interaction with the users. On both cases we\'ve created a chain that is taking the data from our Azure SQL Server, sending to the LLM via prompt and the output goes to Service Now. We\'re using 2 separated python scripts for that and they\'re being triggered by a set of internal rules. Since there are now interactions with the users at this first output, we\'re keeping simple. \n\n Now, we would like to test a "copilot" with one of these outputs, a Q&A. The idea here is just to help an engineer to solve a ticket faster. We don\'t want to store the chat history in any database but rather do something similar to Microsoft Copilot by limiting to 10/20 interactions and keep the memory just while the user is interacting with the LLM and completely delete after the session is over. \n\n Which approach you guys think would be good to take in this scenario to push into production? The LangChain methods should be enough or should we go another route? \n\n Thanks! \n\n\n\n [creating next js website builder , is it worth it ,help you design your website and easly get the first stps ready , like login and dashbord and pages , so you start irectly on implimenting your solution and it exports as next js project directory .](chrome-extension://jbckbhoeahlefbjdofonogamgbjjooca/r/nextjs/comments/1erc89y/creating_next_js_website_builder_is_it_worth_it/) [r/nextjs](chrome-extension://jbckbhoeahlefbjdofonogamgbjjooca/r/nextjs/) [r/nextjs](chrome-extension://jbckbhoeahlefbjdofonogamgbjjooca/r/nextjs/) Next.js is a React framework for building full-stack web applications\n\n91K Members 25 Online ADMIN MOD •\n\n<-time->\n 7 hr. 
ago\n\n-time->\n [creating next js website builder , is it worth it ,help you design your website and easly get the first stps ready , like login and dashbord and pages , so you start irectly on implimenting your solution and it exports as next js project directory .](chrome-extension://jbckbhoeahlefbjdofonogamgbjjooca/r/nextjs/comments/1erc89y/creating_next_js_website_builder_is_it_worth_it/)\n\n [Liquid appreciation post](chrome-extension://jbckbhoeahlefbjdofonogamgbjjooca/r/GlobalOffensive/comments/1erfzc5/liquid_appreciation_post/) [r/GlobalOffensive](chrome-extension://jbckbhoeahlefbjdofonogamgbjjooca/r/GlobalOffensive/) [r/GlobalOffensive](chrome-extension://jbckbhoeahlefbjdofonogamgbjjooca/r/GlobalOffensive/) r/GlobalOffensive is the home for the Counter-Strike community and a hub for the discussion and sharing of content relevant to Counter-Strike: Global Offensive (CS:GO), and Counter-Strike 2 (CS2).\n\nCounter-Strike enjoys a thriving esports scene and dedicated competitive playerbase, as well as a robust creative community. This is the largest and most active CS sub on Reddit.\n\n2.5M Members 856 Online ADMIN MOD •\n\n<-time->\n 5 hr. ago\n\n-time->\n [Liquid appreciation post](chrome-extension://jbckbhoeahlefbjdofonogamgbjjooca/r/GlobalOffensive/comments/1erfzc5/liquid_appreciation_post/) I\'m so darn impressed with them this tourney, and especially ultimate. I\'m excited for their future. They just tied it up with Faze. I don\'t care if they don\'t win, they are absolutely exceeding all my expectations. \n\n\n\n [NotHotDog is live on product hunt](chrome-extension://jbckbhoeahlefbjdofonogamgbjjooca/r/SideProject/comments/1ermhx9/nothotdog_is_live_on_product_hunt/) [r/SideProject](chrome-extension://jbckbhoeahlefbjdofonogamgbjjooca/r/SideProject/) [r/SideProject](chrome-extension://jbckbhoeahlefbjdofonogamgbjjooca/r/SideProject/) r/SideProject is a subreddit for sharing and receiving constructive feedback on side projects.\n\n179K Members 45 Online ADMIN MOD •\n\n<-time->\n 15 min. ago\n\n-time->\n [NotHotDog is live on product hunt](chrome-extension://jbckbhoeahlefbjdofonogamgbjjooca/r/SideProject/comments/1ermhx9/nothotdog_is_live_on_product_hunt/) 🚀 NotHotDog is on Product Hunt! We\'ve made Jian-Yang\'s "genius" actually useful. 🌭 \n\n Why would you like it? It\'s simple: \n\n Debugs APIs faster than Richard can panic-vomit \n\n More reliable than Erlich\'s funding promises \n\n Easier to use than explaining \'middle-out\' to your grandma \n\n Every upvote makes our car doors go like this, not like this 🚗 \n\n If you like it, show some support on Product Hunt [https://www.producthunt.com/posts/nothotdog-alpha](https://www.producthunt.com/posts/nothotdog-alpha) Let\'s make this app hotter than Erlich\'s failed palapa! \n\n We are onboarding alpha users and would love to have you if you are building in Voice AI or using LLMs as part of your API services. 
\n\n\n\n [Is it possible to play D&D with an LLM yet?](chrome-extension://jbckbhoeahlefbjdofonogamgbjjooca/r/LocalLLaMA/comments/1eqr8gn/is_it_possible_to_play_dd_with_an_llm_yet/) [r/LocalLLaMA](chrome-extension://jbckbhoeahlefbjdofonogamgbjjooca/r/LocalLLaMA/) [r/LocalLLaMA](chrome-extension://jbckbhoeahlefbjdofonogamgbjjooca/r/LocalLLaMA/) Subreddit to discuss about Llama, the large language model created by Meta AI.\n\n199K Members 244 Online ADMIN MOD •\n\n<-time->\n 1 day ago\n\n-time->\n• Because you\'ve shown interest in this community\n\n [Is it possible to play D&D with an LLM yet?](chrome-extension://jbckbhoeahlefbjdofonogamgbjjooca/r/LocalLLaMA/comments/1eqr8gn/is_it_possible_to_play_dd_with_an_llm_yet/) My holy grail for LLMs is playing a satisfying, coherent, and mechanically sound game of Dungeons and Dragons. So far, this has not been possible. Things like RAG has made adherence to rules easier, but the main problem remains: LLMs inherently do not plan, they create on the fly. That leads to a dream-like quality where things just sort of appear out of nowhere, and do not fit the world at large. \n\n A giant context window could sort of help with this, but I\'m not an expert on these things. \n\n Services like AI Dungeon are working on this, but I wanted to ask about possible methods here. I feel like, if it doesn\'t exist now, we\'ll probably get there in 2025 or 2026. \n\n I haven\'t tried gigantic models, like the kind you would rent an A100 for - do those work any better for long-form game-based story telling and adherence to mechanics? \n\n Thanks for any help. \n\n\n\n [Seeking validation on a tool I’m working on](chrome-extension://jbckbhoeahlefbjdofonogamgbjjooca/r/nextjs/comments/1er7vmn/seeking_validation_on_a_tool_im_working_on/) [r/nextjs](chrome-extension://jbckbhoeahlefbjdofonogamgbjjooca/r/nextjs/) [r/nextjs](chrome-extension://jbckbhoeahlefbjdofonogamgbjjooca/r/nextjs/) Next.js is a React framework for building full-stack web applications\n\n91K Members 25 Online ADMIN MOD •\n\n<-time->\n 10 hr. ago\n\n-time->\n [Seeking validation on a tool I’m working on](chrome-extension://jbckbhoeahlefbjdofonogamgbjjooca/r/nextjs/comments/1er7vmn/seeking_validation_on_a_tool_im_working_on/) [u/Official_Klaviyo](chrome-extension://jbckbhoeahlefbjdofonogamgbjjooca/user/Official_Klaviyo/) • Promoted Business owners and marketing nerds of Reddit! With Klaviyo AI, you can automate your SMS and email marketing funnels by just writing a few prompts. Type in who you want to reach and what you want them to know, and Klaviyo AI does the rest. Try it out today at the link below ⬇️ klaviyo.com Learn More\n\n [Can you get cheap market analysis in 2024?](chrome-extension://jbckbhoeahlefbjdofonogamgbjjooca/r/startups/comments/1erfrnp/can_you_get_cheap_market_analysis_in_2024/) [r/startups](chrome-extension://jbckbhoeahlefbjdofonogamgbjjooca/r/startups/) [r/startups](chrome-extension://jbckbhoeahlefbjdofonogamgbjjooca/r/startups/) Welcome to /r/startups, the place to discuss startup problems and solutions. Startups are companies that are designed to grow and scale rapidly. Be sure to read and follow all of our rules--we have specific places for common content and requests.\n\n1.7M Members 52 Online ADMIN MOD •\n\n<-time->\n 5 hr. 
ago\n\n-time->\n [Can you get cheap market analysis in 2024?](chrome-extension://jbckbhoeahlefbjdofonogamgbjjooca/r/startups/comments/1erfrnp/can_you_get_cheap_market_analysis_in_2024/) Hey there folks, I would like to have a brief market analysis for my startup, I am wondering, if there are any affordable solutions available. Do you know of any reliable startups/ companies that can help for around 500 USD? Truth to be told, a basic analysis would be all I am looking for, I am not expecting insane reports for this price. If any of you has any recommendations or experience in this area, It would be perfect if you could share your expertise. Thanks \n\n\n\n [Seeking Advice: Should I Specialize in Artificial Intelligence or Networking in IT?](chrome-extension://jbckbhoeahlefbjdofonogamgbjjooca/r/compsci/comments/1erlzz1/seeking_advice_should_i_specialize_in_artificial/) [r/compsci](chrome-extension://jbckbhoeahlefbjdofonogamgbjjooca/r/compsci/) [r/compsci](chrome-extension://jbckbhoeahlefbjdofonogamgbjjooca/r/compsci/) Computer Science Theory and Application. We share and discuss any content that computer scientists find interesting. People from all walks of life welcome, including hackers, hobbyists, professionals, and academics.\n\n3.4M Members 37 Online ADMIN MOD •\n\n<-time->\n 36 min. ago\n\n-time->\n [Seeking Advice: Should I Specialize in Artificial Intelligence or Networking in IT?](chrome-extension://jbckbhoeahlefbjdofonogamgbjjooca/r/compsci/comments/1erlzz1/seeking_advice_should_i_specialize_in_artificial/) **Hello, Reddit community\\!** \n\n I am a computer science student currently facing a important decision regarding my future specialization in the IT field. I have narrowed down my choices to two areas: Artificial Intelligence (AI) and Networking.However, I\'m having a hard time deciding which one to pursue. \n\n I would love to hear from professionals and students in both fields. Your insights, experiences, and advice would be incredibly valuable to me in making an informed decision. \n\n Thank you in advance for your help! \n\n\n\n [Learning fast api](chrome-extension://jbckbhoeahlefbjdofonogamgbjjooca/r/FastAPI/comments/1epkr7t/learning_fast_api/) [r/FastAPI](chrome-extension://jbckbhoeahlefbjdofonogamgbjjooca/r/FastAPI/) [r/FastAPI](chrome-extension://jbckbhoeahlefbjdofonogamgbjjooca/r/FastAPI/) FastAPI is a truly ASGI, async, cutting edge framework written in python 3.\n\n12K Members 3 Online ADMIN MOD •\n\n<-time->\n 2 days ago\n\n-time->\n• Because you\'ve shown interest in a similar community\n\n [Learning fast api](chrome-extension://jbckbhoeahlefbjdofonogamgbjjooca/r/FastAPI/comments/1epkr7t/learning_fast_api/) I was learning fast api i know about routing and auth and authentication and basic stuff what should i go next cause there is no such roadmap available in internet about fast api \n\n\n\n [I wrote this Swift package out of frustration and then open-sourced it!](chrome-extension://jbckbhoeahlefbjdofonogamgbjjooca/r/developersIndia/comments/1er9dba/i_wrote_this_swift_package_out_of_frustration_and/) [r/developersIndia](chrome-extension://jbckbhoeahlefbjdofonogamgbjjooca/r/developersIndia/) [r/developersIndia](chrome-extension://jbckbhoeahlefbjdofonogamgbjjooca/r/developersIndia/) A wholesome community made by & for software & tech folks in India.\nHave a doubt? Ask it out.\n\n807K Members 97 Online ADMIN MOD •\n\n<-time->\n 9 hr. 
ago\n\n-time->\n [I wrote this Swift package out of frustration and then open-sourced it!](chrome-extension://jbckbhoeahlefbjdofonogamgbjjooca/r/developersIndia/comments/1er9dba/i_wrote_this_swift_package_out_of_frustration_and/) I\'m one of those iOS developers who renders UI programmatically. I write a lot of auto-layout constraints on a everyday basis and often hate doing it. Its a lot of verbose and repetitive code. \n\n [https://github.com/kabir-asani/Silicon](https://github.com/kabir-asani/Silicon) \n\n So I got inspired by SnapKit and wrote out Silicon. Its light-weight in comparison to SnapKit but far more expressive and native to UIKit\'s imperative semantics. \n\n\n\n [Been a chronic marijuana user for 17 years now. I\'ve decided to stop cause it started spiking my anxieties and making me super depressed.](chrome-extension://jbckbhoeahlefbjdofonogamgbjjooca/r/leaves/comments/1er5ac3/been_a_chronic_marijuana_user_for_17_years_now/) [r/leaves](chrome-extension://jbckbhoeahlefbjdofonogamgbjjooca/r/leaves/) [r/leaves](chrome-extension://jbckbhoeahlefbjdofonogamgbjjooca/r/leaves/) This is a support and recovery community for practical discussions about how to quit pot, weed, cannabis, edibles, BHO, shatter, Delta 8, or whatever THC-related product you\'re using, and getting support in staying stopped.\n\n341K Members 86 Online ADMIN MOD •\n\n<-time->\n 12 hr. ago\n\n-time->\n [Been a chronic marijuana user for 17 years now. I\'ve decided to stop cause it started spiking my anxieties and making me super depressed.](chrome-extension://jbckbhoeahlefbjdofonogamgbjjooca/r/leaves/comments/1er5ac3/been_a_chronic_marijuana_user_for_17_years_now/) In fact, last couple days that I haven\'t smoked, I\'ve become super depressed and anxious about things, I feel my thinking itself has changed. Is there a way to feel normal ? I\'ve had anxiety in the past but never like this. Super fucking anxious and depressed about nostalgic memories and random thoughts about the future (of not having anybody around/being alone and lonely in old age). I\'ve never had these thoughts . Any advise please ? Will it get better? Thank you in advance. \n\n\n\n [Chroma how to obtain the embedding function or distance from a collection’s metadata?](chrome-extension://jbckbhoeahlefbjdofonogamgbjjooca/r/LangChain/comments/1er935w/chroma_how_to_obtain_the_embedding_function_or/) [r/LangChain](chrome-extension://jbckbhoeahlefbjdofonogamgbjjooca/r/LangChain/) [r/LangChain](chrome-extension://jbckbhoeahlefbjdofonogamgbjjooca/r/LangChain/) LangChain is an open-source framework and developer toolkit that helps developers get LLM applications from prototype to production. \n\nIt is available for Python and Javascript at https://www.langchain.com/.\n\n26K Members 56 Online ADMIN MOD •\n\n<-time->\n 10 hr. ago\n\n-time->\n [Chroma how to obtain the embedding function or distance from a collection’s metadata?](chrome-extension://jbckbhoeahlefbjdofonogamgbjjooca/r/LangChain/comments/1er935w/chroma_how_to_obtain_the_embedding_function_or/) Is it possible to obtain the distance function and the embedding function from a Chroma collection metadata definition ? \n\n [u/TradeStation](chrome-extension://jbckbhoeahlefbjdofonogamgbjjooca/user/TradeStation/) • Official • Promoted Forever is a long time. Get 50% off TradeStation’s brokerage fees forever when you open a new account. Find, click, and trade in seconds with TradeStation. tradestation.com Learn More\n\n [What’s your take on this? 
Short-sighted, since YC already accounts for this by picking the smartest founders (highly adaptable)?](chrome-extension://jbckbhoeahlefbjdofonogamgbjjooca/r/ycombinator/comments/1eralgu/whats_your_take_on_this_shortsighted_since_yc/) [r/ycombinator](chrome-extension://jbckbhoeahlefbjdofonogamgbjjooca/r/ycombinator/) [r/ycombinator](chrome-extension://jbckbhoeahlefbjdofonogamgbjjooca/r/ycombinator/) News and discussion around Y Combinator and Y Combinator companies.\n\nIn 2005, Y Combinator created a new model for funding early stage startups. Twice a year we invest a small amount of money in a large number of startups.\n\n60K Members 10 Online ADMIN MOD •\n\n<-time->\n 9 hr. ago\n\n-time->\n• Because you visited this community before\n\n [What’s your take on this? Short-sighted, since YC already accounts for this by picking the smartest founders (highly adaptable)?](chrome-extension://jbckbhoeahlefbjdofonogamgbjjooca/r/ycombinator/comments/1eralgu/whats_your_take_on_this_shortsighted_since_yc/)\n\n [NotHotDog is live on product hunt](chrome-extension://jbckbhoeahlefbjdofonogamgbjjooca/r/opensource/comments/1ermggl/nothotdog_is_live_on_product_hunt/) [r/opensource](chrome-extension://jbckbhoeahlefbjdofonogamgbjjooca/r/opensource/) [r/opensource](chrome-extension://jbckbhoeahlefbjdofonogamgbjjooca/r/opensource/) A subreddit for everything open source related (for this context, we go off the definition of open source here http://en.wikipedia.org/wiki/Open_source)\n\n232K Members 49 Online ADMIN MOD •\n\n<-time->\n 16 min. ago\n\n-time->\n [NotHotDog is live on product hunt](chrome-extension://jbckbhoeahlefbjdofonogamgbjjooca/r/opensource/comments/1ermggl/nothotdog_is_live_on_product_hunt/) 🚀 NotHotDog is on Product Hunt! We\'ve made Jian-Yang\'s "genius" actually useful. 🌭 \n\n Why would you like it? It\'s simple: \n\n Debugs APIs faster than Richard can panic-vomit \n\n More reliable than Erlich\'s funding promises \n\n Easier to use than explaining \'middle-out\' to your grandma \n\n Every upvote makes our car doors go like this, not like this 🚗 \n\n If you like it, show some support on Product Hunt [https://www.producthunt.com/posts/nothotdog-alpha](https://www.producthunt.com/posts/nothotdog-alpha) \n\n And GitHub: \n\n [https://github.com/vedhsaka/Nothotdog](https://github.com/vedhsaka/Nothotdog) \n\n Let\'s make this app hotter than Erlich\'s failed palapa! \n\n We are onboarding alpha users and would love to have you if you are building in Voice AI or using LLMs as part of your API services. \n\n\n\n [Does going abroad worth is anymore as the job market there keeps getting worse](chrome-extension://jbckbhoeahlefbjdofonogamgbjjooca/r/Indians_StudyAbroad/comments/1erbhjv/does_going_abroad_worth_is_anymore_as_the_job/) [r/Indians_StudyAbroad](chrome-extension://jbckbhoeahlefbjdofonogamgbjjooca/r/Indians_StudyAbroad/) [r/Indians_StudyAbroad](chrome-extension://jbckbhoeahlefbjdofonogamgbjjooca/r/Indians_StudyAbroad/) Subreddit for Indians to discuss Study Abroad plans and experiences\n\nwiki ->\n https://www.reddit.com/r/Indians_StudyAbroad/wiki/\n\n61K Members 11 Online ADMIN MOD •\n\n<-time->\n 8 hr. 
ago\n\n-time->\n [Does going abroad worth is anymore as the job market there keeps getting worse](chrome-extension://jbckbhoeahlefbjdofonogamgbjjooca/r/Indians_StudyAbroad/comments/1erbhjv/does_going_abroad_worth_is_anymore_as_the_job/) “my_qualifications” are 8.9 CGPA graduated in mass comm & journalism 7.5 ilets test ( but expired) but I can maintain the score n even score higher if I study Graduated in 2023 from a tier 2 college Have about 6 months of work experience but nothing to prove it \n\n I want to go abroad for masters in marketing management (msc) or related degree but right now I am wondering if the job market there is worth it and to which country and uni to apply for \n\n I am hard working and smart too but have been wondering if it’s worth going to uk or any other European countries for masters if the job market in uk is really bad (since the fees for these degrees are at least 30 L) and other countries have a language requirement which I can learn but it takes time to become fluent. Does anyone know what country would be best for my field and also kind of worth it. \n\n\n\n [So many no-code AI website builders, what\'s the sauce?](chrome-extension://jbckbhoeahlefbjdofonogamgbjjooca/r/SaaS/comments/1erll74/so_many_nocode_ai_website_builders_whats_the_sauce/) [r/SaaS](chrome-extension://jbckbhoeahlefbjdofonogamgbjjooca/r/SaaS/) [r/SaaS](chrome-extension://jbckbhoeahlefbjdofonogamgbjjooca/r/SaaS/) Discussions and useful links for SaaS owners, online business owners, and more.\n\n141K Members 42 Online ADMIN MOD •\n\n<-time->\n 53 min. ago\n\n-time->\n [So many no-code AI website builders, what\'s the sauce?](chrome-extension://jbckbhoeahlefbjdofonogamgbjjooca/r/SaaS/comments/1erll74/so_many_nocode_ai_website_builders_whats_the_sauce/) Does anyone know how the no-code builders ai builders are done? Seems like it\'s simple enough to do because there\'s a bunch of them coming out. \n\n (I don\'t want to build one, just interested as a technical developer) \n\n\n\n [Which major companies don\'t have a toxic work culture for senior engineers, on average?](chrome-extension://jbckbhoeahlefbjdofonogamgbjjooca/r/sre/comments/1eqxmvo/which_major_companies_dont_have_a_toxic_work/) [r/sre](chrome-extension://jbckbhoeahlefbjdofonogamgbjjooca/r/sre/) [r/sre](chrome-extension://jbckbhoeahlefbjdofonogamgbjjooca/r/sre/) everything site reliability engineering\n\n28K Members 10 Online ADMIN MOD •\n\n<-time->\n 21 hr. ago\n\n-time->\n• Because you\'ve shown interest in a similar community\n\n [Which major companies don\'t have a toxic work culture for senior engineers, on average?](chrome-extension://jbckbhoeahlefbjdofonogamgbjjooca/r/sre/comments/1eqxmvo/which_major_companies_dont_have_a_toxic_work/) Companies that are terrible to work at, if online forums are anything to go off of: \n\n - JPMC\n - Capital One\n - Amazon\n - Apple\n - Google & Microsoft (post layoffs, especially in cloud teams, which are most of the ones hiring)\n - pretty much every startup and game dev company\n - Citadel\n - Social media (facebook, reddit, snapchat, especially post-layoff)\n\n I can confirm the bad engineering culture at a couple of these companies. I\'m running out of places to consider viable. 
\n\n\n\n [Why do I need to smoke weed?](chrome-extension://jbckbhoeahlefbjdofonogamgbjjooca/r/Petioles/comments/1er7ile/why_do_i_need_to_smoke_weed/) [r/Petioles](chrome-extension://jbckbhoeahlefbjdofonogamgbjjooca/r/Petioles/) [r/Petioles](chrome-extension://jbckbhoeahlefbjdofonogamgbjjooca/r/Petioles/) Petioles is a positive community for those interested in responsible consumption of Cannabis. Discussions include everything from tolerance breaks, to personal feelings and cravings.\n\n146K Members 4 Online ADMIN MOD •\n\n<-time->\n 11 hr. ago\n\n-time->\n [Why do I need to smoke weed?](chrome-extension://jbckbhoeahlefbjdofonogamgbjjooca/r/Petioles/comments/1er7ile/why_do_i_need_to_smoke_weed/) false I’m really curious. What is it that makes us need to smoke weed so badly? I really love it. I’m 16, and I’ve used a lot of drugs. All kinds of drugs. Alcohol, MDMA, snow, LSD—you name it. I never thought about it before, but I may have seriously messed up my brain development lmao. And the saddest part is, I don’t even care. I used to be afraid of things like that but I really don’t care anymore.\n\nWhen I smoke weed (or take drugs in general) I feel a sort of calmness in my soul. I get pretty bad panic attacks without, but so long as I have drugs, I know everything will be alright. When I smoke weed specifically, I get a feeling which I have come to describe as a sort of opioid like euphoria—it feels like a warm blanket covers me and the whole world, and I have nothing to worry about. I can choose not to worry—I can’t do that while sober. I just love it so much. I love getting high and watching old movies. I get high and work out, get high and play basketball, get high and read books (which I mostly forget), I even get high to roll up. TV becomes so mesmerizing and beautiful; I become super introspective and I can fully understand the characters and their motivations, and I really feel for them, I can empathise with them. And don’t get me started on smoking blunts, I could do that all day. When I smoke a 3.5+ with hash it feels almost psychedelic for the first hour, then falls into a heavy stone. I smoke alone, with my brother, with my girlfriend, with my friends. I smoke every day. Everything is just better when high.\n\nBut weed is also super isolating, even when I smoke with others I feel disconnected—sort of in my own world. And when I smoke too often, or too much, I feel like I lose my inner world. There’s no simpler way to describe it. Nothing feels like it matters. I don’t even feel like a person. And then I don’t feel like doing anything, even getting out of bed or brushing my teeth. I like poetry, I like philosophy, I like art. I’m an intellectual bastard. But the things that used to interest me and feel so insanely affirming and validating don’t make me feel anything. Just like an over-evolved monkey with societal expectations. When I get really high, I imagine myself as a less evolved mammal, like a squirrel or dog or something. I really do! No exaggeration! I’ve heard that our memories are what we use to build our sense of self, and I feel like THC-related damage to memory is probably what’s responsible for this feeling. But what can I do? Then again it’s probably my fault for using so many drugs as a youth.\n\nMaybe, it’s due to neurodivergence—I have ADHD and anxiety, and I guess you could say I self-medicate. I feel like this is probably the major root of my “addiction.” But I like other drugs too. My favourite drug in the world, which I’ve only tried 3 times, is oxycodone. 
(Don’t worry, I don’t plan on using it again.) When I was sick with COVID, I took 20mg oxy with acetaminophen—it gave me that feeling I then realised I was chasing my whole life: comfort. It was like somebody laid a warm blanket over me and the whole world finally went quiet. Everything was so quiet. I was watching Seinfeld and I was just so happy. It was kind of spiritual: I realised I was being dumb by being petty and holding grudges, and I forgave everyone I had resentment for. I stepped out onto my porch while it was storming, and it was coming down hard, and for a moment it felt like I experienced true awareness. I couldn’t tell where the rain stopped and where I began. Not a thought in my head, I was just glad to be there. And I wasn’t scared of anything. Not even dying. It almost seemed laughable to be afraid—I was living! There was so much to appreciate! And everything was going to be okay. It sounds trite but that’s truly what I thought, and I’m not the type that’s likely to believe in that kind of corny nonsense without reason. But there was a reason! At that moment there was.\n\nSo my question is: why do I feel this way? Why do I need to use weed (or drugs) to feel this kind of comfort? I’ve been a drug-user for all of my young adult life, do non-users also feel this way? Or did these feelings drive me to use in the first place? All discussion would be appreciated, but if you’re coming from a neurological perspective, I’d rather you refrain from answering, we have ChatGPT for that. I’m curious from an emotional point of view. I relate to real people and real stories.\n\n\n\n [Richard Dolan’s 5-Hour Breakdown of the Wilson UFO Leak: Insights from Someone Who Saw the Memo 13 Years Before It Leaked](chrome-extension://jbckbhoeahlefbjdofonogamgbjjooca/r/UFOs/comments/1er98sp/richard_dolans_5hour_breakdown_of_the_wilson_ufo/) [r/UFOs](chrome-extension://jbckbhoeahlefbjdofonogamgbjjooca/r/UFOs/) [r/UFOs](chrome-extension://jbckbhoeahlefbjdofonogamgbjjooca/r/UFOs/) A community for discussion related to Unidentified Flying Objects. Share your sightings, experiences, news, and investigations. We aim to elevate good research while maintaining healthy skepticism.\n\n2.6M Members 395 Online ADMIN MOD •\n\n<-time->\n 9 hr. ago\n\n-time->\n [Richard Dolan’s 5-Hour Breakdown of the Wilson UFO Leak: Insights from Someone Who Saw the Memo 13 Years Before It Leaked](chrome-extension://jbckbhoeahlefbjdofonogamgbjjooca/r/UFOs/comments/1er98sp/richard_dolans_5hour_breakdown_of_the_wilson_ufo/) [u/realPubkey](chrome-extension://jbckbhoeahlefbjdofonogamgbjjooca/user/realPubkey/) • Promoted With RxDB you can store data locally to make your app work offline\n- \n\n<-figure->\n\n\n-figure->\n- ![]()\n\n<-figure->\n\n\n-figure->\n\n\n\n [New SnapKey function is allowed?](chrome-extension://jbckbhoeahlefbjdofonogamgbjjooca/r/FACEITcom/comments/1er6fl7/new_snapkey_function_is_allowed/) [r/FACEITcom](chrome-extension://jbckbhoeahlefbjdofonogamgbjjooca/r/FACEITcom/) [r/FACEITcom](chrome-extension://jbckbhoeahlefbjdofonogamgbjjooca/r/FACEITcom/) FACEIT is the largest platform for competitive gaming on CS2, DOTA 2, League of Legends and many other games. \n\nThis subreddit is where you can express your feedback, appreciation of the platform as well as reporting and discussing issues affecting our community - we appreciate all of this.\n\n70K Members 8 Online ADMIN MOD •\n\n<-time->\n 11 hr. 
ago\n\n-time->\n [New SnapKey function is allowed?](chrome-extension://jbckbhoeahlefbjdofonogamgbjjooca/r/FACEITcom/comments/1er6fl7/new_snapkey_function_is_allowed/) Too many rumors about SnapTap function \n\n What about SnapKey? [https://github.com/cafali/SnapKey](https://github.com/cafali/SnapKey) \n\n is it legal and allowed on faceit? \n\n\n\n [Does anyone else prefer to create simple, minimalistic websites?](chrome-extension://jbckbhoeahlefbjdofonogamgbjjooca/r/webdev/comments/1er2x1a/does_anyone_else_prefer_to_create_simple/) [r/webdev](chrome-extension://jbckbhoeahlefbjdofonogamgbjjooca/r/webdev/) [r/webdev](chrome-extension://jbckbhoeahlefbjdofonogamgbjjooca/r/webdev/) A community dedicated to all things web development: both front-end and back-end. For more design-related questions, try /r/web_design.\n\n2.5M Members 109 Online ADMIN MOD •\n\n<-time->\n 15 hr. ago\n\n-time->\n [Does anyone else prefer to create simple, minimalistic websites?](chrome-extension://jbckbhoeahlefbjdofonogamgbjjooca/r/webdev/comments/1er2x1a/does_anyone_else_prefer_to_create_simple/) I am a very creative person, who likes to mix computer science with other passions, and honestly when I build a web page I don\'t want it to be full of beautiful things that actually serve no purpose. I much prefer a relatively simple page, that shows what it actually has to show, that reaches the user directly, to make us understand. Am I the only one here who thinks something like this? \n\n\n\n [Is there really a shortage of tech workers, or do companies want cheap labor?](chrome-extension://jbckbhoeahlefbjdofonogamgbjjooca/r/codingbootcamp/comments/1eo07k7/is_there_really_a_shortage_of_tech_workers_or_do/) [r/codingbootcamp](chrome-extension://jbckbhoeahlefbjdofonogamgbjjooca/r/codingbootcamp/) [r/codingbootcamp](chrome-extension://jbckbhoeahlefbjdofonogamgbjjooca/r/codingbootcamp/) A subreddit dedicated to questions and discussions about coding bootcamps.\n\n\nIrrelevant content and referral discount links will be deleted and might get you banned.\n\n50K Members 11 Online ADMIN MOD •\n\n<-time->\n 4 days ago\n\n-time->\n• Because you\'ve shown interest in a similar community\n\n [Is there really a shortage of tech workers, or do companies want cheap labor?](chrome-extension://jbckbhoeahlefbjdofonogamgbjjooca/r/codingbootcamp/comments/1eo07k7/is_there_really_a_shortage_of_tech_workers_or_do/) At this point, I am not sure if I believe that there\'s a million person shortage in cyber. It seems like companies keep doing layoffs and outsourcing jobs. 40 percent of millennials have a degree, and many new grads are now working high school jobs. It can\'t be an education shortage. Something else must be going on. You could easily build a company town and train and hire workers directly in the USA if you wanted to. \n\n\n\n [What piracy term took you the longest to understand?](chrome-extension://jbckbhoeahlefbjdofonogamgbjjooca/r/Piracy/comments/1erbd54/what_piracy_term_took_you_the_longest_to/) [r/Piracy](chrome-extension://jbckbhoeahlefbjdofonogamgbjjooca/r/Piracy/) [r/Piracy](chrome-extension://jbckbhoeahlefbjdofonogamgbjjooca/r/Piracy/) ⚓ Dedicated to the discussion of digital piracy, including ethical problems and legal advancements.\n\n1.7M Members 1.5K Online ADMIN MOD •\n\n<-time->\n 8 hr. ago\n\n-time->\n [What piracy term took you the longest to understand?](chrome-extension://jbckbhoeahlefbjdofonogamgbjjooca/r/Piracy/comments/1erbd54/what_piracy_term_took_you_the_longest_to/) DHT and swarms for me. 
\n\n\n\n [How would you guys utilize a spare domain?](chrome-extension://jbckbhoeahlefbjdofonogamgbjjooca/r/SEO/comments/1erm74s/how_would_you_guys_utilize_a_spare_domain/) [r/SEO](chrome-extension://jbckbhoeahlefbjdofonogamgbjjooca/r/SEO/) [r/SEO](chrome-extension://jbckbhoeahlefbjdofonogamgbjjooca/r/SEO/) Reddit\'s No.1 SEO Community!\n\nStay Up to Date with All Our Projects here:\n\n [SilverDegenClub Linktree](https://linktr.ee/silverdegenclub)\n\n [SilverWars Linktree](https://linktr.ee/mineralwealthofficial)\n\nSearch engine optimisation and all its wider facets. Your community for SEO news, tips and case studies.\n\n308K Members 40 Online Reddit\'s No.1 SEO Community!\n\nStay Up to Date with All Our Projects here:\n\n [SilverDegenClub Linktree](https://linktr.ee/silverdegenclub)\n\n [SilverWars Linktree](https://linktr.ee/mineralwealthofficial)\n\nADMIN MOD •\n\n<-time->\n 27 min. ago\n\n-time->\n [How would you guys utilize a spare domain?](chrome-extension://jbckbhoeahlefbjdofonogamgbjjooca/r/SEO/comments/1erm74s/how_would_you_guys_utilize_a_spare_domain/) So I have my main domain and website, theservicessold(dot)com and I have servicessold(dot)us \n\n Right now the one ending in us just redirects to my main site. I was wondering if this is what you guys would do, or if you\'d host a simple web page saying to visit the main state and get a free back link. I also thought about hosting a forum relating to the industry since it\'s lacking one at the moment. Then I get the free back link and probably #1 on Google when you search "services sold forum." \n\n Whatever I do will probably be hosted on my home server, so nothing too intensive ideally. \n\n Curious on your guys\' thoughts. \n\n\n\n [Got an offer for UI/UX designer role.](chrome-extension://jbckbhoeahlefbjdofonogamgbjjooca/r/UI_Design/comments/1eq8blw/got_an_offer_for_uiux_designer_role/) [r/UI_Design](chrome-extension://jbckbhoeahlefbjdofonogamgbjjooca/r/UI_Design/) [r/UI_Design](chrome-extension://jbckbhoeahlefbjdofonogamgbjjooca/r/UI_Design/) User Interface Design (UI Design) is the design of user interfaces for the web and devices using design and typography principles with a focus on maximizing usability and the user experience.\n\n181K Members 8 Online ADMIN MOD •\n\n<-time->\n 2 days ago\n\n-time->\n• Because you\'ve shown interest in a similar community\n\n [Got an offer for UI/UX designer role.](chrome-extension://jbckbhoeahlefbjdofonogamgbjjooca/r/UI_Design/comments/1eq8blw/got_an_offer_for_uiux_designer_role/) Got an offer for UI/UX designer role. \n\n I just wanted to know the freshers UI/UX designers salary. \n\n I got an offer from a design studio. They were paying me 25-30k a month. Remote: 5 days a week. 10Am -7 Pm \n\n Is it alright? \n\n Or exploiting? \n\n [u/thecouponnerd](chrome-extension://jbckbhoeahlefbjdofonogamgbjjooca/user/thecouponnerd/) • Promoted LPT: Here\'s how I saved $1,300 this year in 15 minutes (probably more like 5, but I\'m being generous). 
It\'s basically an open secret in the industry right now, **but switching auto insurance carriers will typically save you ~$400-$500/year on avg.**\n\nYou should absolutely look into switching insurance companies once a year (potentially more often than that).\n\n(Sometimes you save a lot more than $400: **switching this year saved me $1,300 dollars.** )\n\n**Why does switching save you money?**\n\nAs you age, get married, etc., your insurance risk goes down, which means your rates should drop every year, right?\n\n**But of course, they don\'t drop.** Your current company has no incentive to let you pay less. They know you don\'t want to go through the hassle of switching.\n\n**Here’s how to quickly see how much you can save by switching (takes maybe 2 minutes):**\n\n- Pull up a 3rd party comparison site and look for better rates (I used Coverage.com, but Auto-Savings and Nerd Wallet are always solid too)\n- Answer the questions on the page\n- It’ll spit out a bunch of insurance quotes for you.\n\nThat’s literally it. You’ll likely save yourself a bunch of money.\n\nMost people (obviously) don\'t bother because it seems like a hassle, but it\'s pretty easy to switch carriers these days.\n\nBest of luck.\n\n**BIG NOTE: I\'m an affiliate of** Coverage.com **and Auto-Savings, I get paid when you sign up for insurance through their links, which helps me put out more free couponing/saving content (and buy elaborate toys for my cat). If you don\'t want me to get paid, just go to google and find a 3rd party insurance comparison site that way.**\n\n\n\n [Got YouTube tv base plan with YouTube premium, want 29$ per month](chrome-extension://jbckbhoeahlefbjdofonogamgbjjooca/r/accountsharing/comments/1erm1kp/got_youtube_tv_base_plan_with_youtube_premium/) [r/accountsharing](chrome-extension://jbckbhoeahlefbjdofonogamgbjjooca/r/accountsharing/) [r/accountsharing](chrome-extension://jbckbhoeahlefbjdofonogamgbjjooca/r/accountsharing/) THIS SUB IS HEAVILY MODERATED. IGNORANCE OF THE RULES WILL NEVER BE ACCEPTED AS AN EXCUSE.\n\nA place to share your login information for PERSONAL accounts/services/sites that require registering, and in exchange get similar services for yourself.\n\n13K Members 5 Online ADMIN MOD •\n\n<-time->\n 34 min. ago\n\n-time->\n [Got YouTube tv base plan with YouTube premium, want 29$ per month](chrome-extension://jbckbhoeahlefbjdofonogamgbjjooca/r/accountsharing/comments/1erm1kp/got_youtube_tv_base_plan_with_youtube_premium/) Dm if interested \n\n\n\n [What Keeps You Up at Night these days?](chrome-extension://jbckbhoeahlefbjdofonogamgbjjooca/r/SEO/comments/1er5cad/what_keeps_you_up_at_night_these_days/) [r/SEO](chrome-extension://jbckbhoeahlefbjdofonogamgbjjooca/r/SEO/) [r/SEO](chrome-extension://jbckbhoeahlefbjdofonogamgbjjooca/r/SEO/) Reddit\'s No.1 SEO Community!\n\nStay Up to Date with All Our Projects here:\n\n [SilverDegenClub Linktree](https://linktr.ee/silverdegenclub)\n\n [SilverWars Linktree](https://linktr.ee/mineralwealthofficial)\n\nSearch engine optimisation and all its wider facets. Your community for SEO news, tips and case studies.\n\n308K Members 40 Online Reddit\'s No.1 SEO Community!\n\nStay Up to Date with All Our Projects here:\n\n [SilverDegenClub Linktree](https://linktr.ee/silverdegenclub)\n\n [SilverWars Linktree](https://linktr.ee/mineralwealthofficial)\n\nADMIN MOD •\n\n<-time->\n 12 hr. 
ago\n\n-time->\n [What Keeps You Up at Night these days?](chrome-extension://jbckbhoeahlefbjdofonogamgbjjooca/r/SEO/comments/1er5cad/what_keeps_you_up_at_night_these_days/) hi \n\n As a Growth Consultant and SEO, I\'m curious, what are the things that keep you up at night when it comes to your websites these days? \n\n Is it the ever-changing SERPs, loss of revenue, loss of rankings, loss of content performance, the pressure to stay ahead of the competition, knowing what google will do next, pressure of AI, knowing the vision for the future tactics or maybe something else entirely? \n\n'), Document(metadata={'BrowsingSessionId': '48573787', 'VisitedWebPageURL': 'https://x.com/home', 'VisitedWebPageTitle': 'Home / X', 'VisitedWebPageDateWithTimeInISOString': '2024-08-13T16:50:07-07:00', 'VisitedWebPageReffererURL': 'https://x.com/home', 'VisitedWebPageVisitDurationInMilliseconds': 10383, 'VisitedWebPageContent': None}, page_content='\n\n<-nav->\n [For you](chrome-extension://jbckbhoeahlefbjdofonogamgbjjooca/home) [Following](chrome-extension://jbckbhoeahlefbjdofonogamgbjjooca/home)\n\n-nav->\nSee new posts  What is happening?!\n\n\n<-nav->\n\n\n-nav->\nPost---\n\n\n# Your Home Timeline\n\n\n\n  [Madni Aghadi](chrome-extension://jbckbhoeahlefbjdofonogamgbjjooca/hey_madni) [@hey_madni](chrome-extension://jbckbhoeahlefbjdofonogamgbjjooca/hey_madni) · \n\n<-time->\n Aug 12\n\n-time->\n The Olympics just ended.\n\nHere are 12 key moments you must see:  1.5K 28K 487K [77M](chrome-extension://jbckbhoeahlefbjdofonogamgbjjooca/hey_madni/status/1823022750681989140/analytics)\n\n  [ogug](chrome-extension://jbckbhoeahlefbjdofonogamgbjjooca/ogug8) [@ogug8](chrome-extension://jbckbhoeahlefbjdofonogamgbjjooca/ogug8) · \n\n<-time->\n 10h\n\n-time->\n Women who gave the “nice guy” a chance how it ended up. 
The replies bruh (Thread )  495 1.1K 11K [6.4M](chrome-extension://jbckbhoeahlefbjdofonogamgbjjooca/ogug8/status/1823349885157945729/analytics)\n\n  [down bad comments](chrome-extension://jbckbhoeahlefbjdofonogamgbjjooca/downbadcomment) [@downbadcomment](chrome-extension://jbckbhoeahlefbjdofonogamgbjjooca/downbadcomment) · \n\n<-time->\n Aug 12\n\n-time->\n the most down bad moments of all time\n\na thread  462 7.2K 195K [24M](chrome-extension://jbckbhoeahlefbjdofonogamgbjjooca/downbadcomment/status/1823088796952297815/analytics) \n## Pinned by people you follow\n\n \n\n---\n\n---\n\n\n# Carousel\n\n\n\n<-nav->\n Quote Elon Musk @elonmusk ·\n\n<-time->\n 17h\n\n-time->\nCombined views of the conversation with @realDonaldTrump and subsequent discussion by other accounts now ~1 billion x.com/x/status/18232… Quote Dexerto @Dexerto ·\n\n<-time->\n 6h\n\n-time->\n    There’s now a “Rawdog Flight Simulator” that uses eye-tracking \n\nPlayers simply stare at the back of a plane seat for the flight’s duration\n\n-nav->\n\n\n---\n\n\n\n  [Manish Kumar](chrome-extension://jbckbhoeahlefbjdofonogamgbjjooca/manishkhosiya) [@manishkhosiya](chrome-extension://jbckbhoeahlefbjdofonogamgbjjooca/manishkhosiya) · \n\n<-time->\n 13h\n\n-time->\n This is Heart-breaking !\n\nPlane Crashes caught on video.\n\nDon’t open if you are soft hearted  75 750 8.7K [6.9M](chrome-extension://jbckbhoeahlefbjdofonogamgbjjooca/manishkhosiya/status/1823299666139848750/analytics)\n## Who to follow\n\n  [NumerologyFact](chrome-extension://jbckbhoeahlefbjdofonogamgbjjooca/Lifepathfacts) [@Lifepathfacts](chrome-extension://jbckbhoeahlefbjdofonogamgbjjooca/Lifepathfacts) Follow Click to Follow Lifepathfacts Statistics powered by [@cuetheapp](chrome-extension://jbckbhoeahlefbjdofonogamgbjjooca/cuetheapp) Narendra Modi and 2 others follow  [Joe Biden](chrome-extension://jbckbhoeahlefbjdofonogamgbjjooca/JoeBiden) [@JoeBiden](chrome-extension://jbckbhoeahlefbjdofonogamgbjjooca/JoeBiden) Follow Click to Follow JoeBiden Husband to [@DrBiden](chrome-extension://jbckbhoeahlefbjdofonogamgbjjooca/DrBiden) , proud father and grandfather. Ready to finish the job for all Americans. Official account is [@POTUS](chrome-extension://jbckbhoeahlefbjdofonogamgbjjooca/POTUS).')]
\ No newline at end of file
diff --git a/backend/HIndices.py b/backend/HIndices.py
index c851203..4c97b9e 100644
--- a/backend/HIndices.py
+++ b/backend/HIndices.py
@@ -1,5 +1,5 @@
-import asyncio
from datetime import datetime
+import json
from typing import List
from gpt_researcher import GPTResearcher
from langchain_chroma import Chroma
@@ -10,18 +10,19 @@ from langchain.docstore.document import Document
from langchain_experimental.text_splitter import SemanticChunker
from langchain.retrievers import ContextualCompressionRetriever
from langchain.retrievers.document_compressors import FlashrankRerank
+import numpy as np
from sqlalchemy.orm import Session
-from fastapi import Depends
+from fastapi import Depends, WebSocket
from langchain_core.prompts import PromptTemplate
import os
from dotenv import load_dotenv
+from Utils.stringify import stringify
from pydmodels import AIAnswer, Reference
from database import SessionLocal
-from models import Documents, User
-from prompts import CONTEXT_ANSWER_PROMPT
+from models import Documents
load_dotenv()
SMART_LLM = os.environ.get("SMART_LLM")
@@ -43,6 +44,26 @@ def get_db():
yield db
finally:
db.close()
+
+class NumpyEncoder(json.JSONEncoder):
+ def default(self, obj):
+ if isinstance(obj, np.ndarray):
+ return obj.tolist()
+ return super().default(obj)
+
+class ConnectionManager:
+ def __init__(self):
+ self.active_connections: List[WebSocket] = []
+
+ async def connect(self, websocket: WebSocket):
+ await websocket.accept()
+ self.active_connections.append(websocket)
+
+ def disconnect(self, websocket: WebSocket):
+ self.active_connections.remove(websocket)
+
+ async def send_personal_message(self, message: str, websocket: WebSocket):
+ await websocket.send_text(message)
class HIndices:
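Since this hunk introduces the WebSocket plumbing used later by `ws_experimental_search`, here is a minimal sketch of how `NumpyEncoder` and `ConnectionManager` might be wired into a FastAPI route. Only the two classes come from this patch; the `/ws` route path, the echo payload, and the `np.zeros(3)` stand-in embedding are illustrative assumptions.

```python
# Illustrative only: assumes NumpyEncoder and ConnectionManager (added in
# the hunk above) are importable from HIndices; the route is hypothetical.
import json

import numpy as np
from fastapi import FastAPI, WebSocket, WebSocketDisconnect

from HIndices import ConnectionManager, NumpyEncoder

app = FastAPI()
manager = ConnectionManager()

@app.websocket("/ws")
async def ws_endpoint(websocket: WebSocket):
    await manager.connect(websocket)
    try:
        while True:
            query = await websocket.receive_text()
            # NumpyEncoder lets payloads containing np.ndarray values
            # (e.g. embeddings) pass through json.dumps without a TypeError.
            payload = {"type": "stream", "content": f"echo: {query}",
                       "embedding": np.zeros(3)}
            await manager.send_personal_message(
                json.dumps(payload, cls=NumpyEncoder), websocket
            )
    except WebSocketDisconnect:
        manager.disconnect(websocket)
```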
@@ -74,7 +95,8 @@ class HIndices:
def summarize_file_doc(self, page_no, doc, search_space):
report_template = """
- You are an eagle-eyed researcher, skilled at summarizing lengthy documents with precision and clarity.
+ You are an eagle-eyed researcher, skilled at summarizing lengthy documents with precision and clarity.
+
I would like you to assist me in summarizing the following text. Please create a comprehensive summary that captures the main ideas, key details, and essential arguments presented in the text. Your summary should adhere to the following guidelines:
Length and Depth: Provide a detailed summary that is approximately [insert desired word count or length, e.g., 300-500 words]. Ensure that it is thorough enough to convey the core message without losing important nuances.
@@ -122,7 +144,6 @@ class HIndices:
# metadict['languages'] = metadict['languages'][0]
return Document(
- id=str(page_no),
page_content=response,
metadata=metadict
)
@@ -141,14 +162,11 @@ class HIndices:
# metadict['languages'] = metadict['languages'][0]
return Document(
- id=str(page_no),
page_content=response.content,
metadata=metadict
)
def summarize_webpage_doc(self, page_no, doc, search_space):
-
-
report_template = """
You are an eagle-eyed researcher, skilled at summarizing lengthy documents with precision and clarity.
I would like you to assist me in summarizing the following text. Please create a comprehensive summary that captures the main ideas, key details, and essential arguments presented in the text. Your summary should adhere to the following guidelines:
@@ -187,7 +205,6 @@ class HIndices:
response = report_chain.invoke({"document": doc})
return Document(
- id=str(page_no),
page_content=response,
metadata={
"filetype": 'WEBPAGE',
@@ -207,7 +224,6 @@ class HIndices:
response = report_chain.invoke({"document": doc})
return Document(
- id=str(page_no),
page_content=response.content,
metadata={
"filetype": 'WEBPAGE',
@@ -223,29 +239,17 @@ class HIndices:
}
)
- def encode_docs_hierarchical(self, documents, files_type, search_space='GENERAL', db: Session = Depends(get_db)):
+ def encode_docs_hierarchical(self, documents, search_space_instance, files_type, db: Session = Depends(get_db)):
"""
Creates and Saves/Updates docs in hierarchical indices and postgres table
"""
-
- prev_doc_idx = len(documents) + 1
- # #Save docs in PG
- user = db.query(User).filter(User.username == self.username).first()
-
- if(len(user.documents) < prev_doc_idx):
- summary_last_id = 0
- detail_id_counter = 0
- else:
- summary_last_id = int(user.documents[-prev_doc_idx].id)
- detail_id_counter = int(user.documents[-prev_doc_idx].desc_vector_end)
-
-
+ page_no_offset = len(self.detailed_store.get()['documents'])
# Process documents
summaries = []
if(files_type=='WEBPAGE'):
- batch_summaries = [self.summarize_webpage_doc(page_no = i + summary_last_id, doc=doc, search_space=search_space) for i, doc in enumerate(documents)]
+            batch_summaries = [self.summarize_webpage_doc(page_no=i + page_no_offset, doc=doc, search_space=search_space_instance.name) for i, doc in enumerate(documents)]
else:
- batch_summaries = [self.summarize_file_doc(page_no = i + summary_last_id, doc=doc, search_space=search_space) for i, doc in enumerate(documents)]
+            batch_summaries = [self.summarize_file_doc(page_no=i + page_no_offset, doc=doc, search_space=search_space_instance.name) for i, doc in enumerate(documents)]
summaries.extend(batch_summaries)
@@ -254,21 +258,37 @@ class HIndices:
for i, summary in enumerate(summaries):
+            # Add a single summary to the vector store
+ added_doc_id = self.summary_store.add_documents(filter_complex_metadata([summary]))
+
+ if(files_type=='WEBPAGE'):
+ new_pg_doc = Documents(
+ title=summary.metadata['VisitedWebPageTitle'],
+ document_metadata=stringify(summary.metadata),
+ page_content=documents[i].page_content,
+ file_type='WEBPAGE',
+ summary_vector_id=added_doc_id[0],
+ )
+ else:
+ new_pg_doc = Documents(
+ title=summary.metadata['filename'],
+ document_metadata=stringify(summary.metadata),
+ page_content=documents[i].page_content,
+ file_type=summary.metadata['filetype'],
+ summary_vector_id=added_doc_id[0],
+ )
+
+ # Store it in PG
+ search_space_instance.documents.append(new_pg_doc)
+ db.commit()
+
            # Semantic chunking for better contextual compression
text_splitter = SemanticChunker(embeddings=self.embeddings)
chunks = text_splitter.split_documents([documents[i]])
- user.documents[-(len(summaries) - i)].desc_vector_start = detail_id_counter
- user.documents[-(len(summaries) - i)].desc_vector_end = detail_id_counter + len(chunks)
-
-
- db.commit()
-
# Update metadata for detailed chunks
for i, chunk in enumerate(chunks):
- chunk.id = str(detail_id_counter)
chunk.metadata.update({
- "chunk_id": detail_id_counter,
"summary": False,
"page": summary.metadata['page'],
})
@@ -297,27 +317,15 @@ class HIndices:
chunk.page_content = ieee_content
- detail_id_counter += 1
-
detailed_chunks.extend(chunks)
#update vector stores
- self.summary_store.add_documents(filter_complex_metadata(summaries))
self.detailed_store.add_documents(filter_complex_metadata(detailed_chunks))
return self.summary_store, self.detailed_store
def delete_vector_stores(self, summary_ids_to_delete: list[str], db: Session = Depends(get_db)):
- self.summary_store.delete(ids=summary_ids_to_delete)
- for id in summary_ids_to_delete:
- summary_entry = db.query(Documents).filter(Documents.id == int(id) + 1).first()
-
- desc_ids_to_del = [str(id) for id in range(summary_entry.desc_vector_start, summary_entry.desc_vector_end)]
-
- self.detailed_store.delete(ids=desc_ids_to_del)
- db.delete(summary_entry)
- db.commit()
-
+ self.summary_store.delete(ids=summary_ids_to_delete)
return "success"
def summary_vector_search(self,query, search_space='GENERAL'):
@@ -344,7 +352,7 @@ class HIndices:
unique_refs = {}
id_mapping = {
ref.id: unique_refs.setdefault(
- ref.url, Reference(id=str(len(unique_refs) + 1), title=ref.title, url=ref.url)
+ ref.source, Reference(id=str(len(unique_refs) + 1), title=ref.title, source=ref.source)
).id
for ref in references
}
@@ -356,25 +364,131 @@ class HIndices:
return updated_answer, list(unique_refs.values())
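The hunk above renames `Reference.url` to `Reference.source`, so deduplication keys on whatever source string is present (URL or filename). Below is a hedged sketch of the pydmodels shapes this diff implies, with the dedup pattern run in isolation; the real definitions live in `backend/pydmodels.py` and the field sets here are inferred purely from how the diff uses them.

```python
from typing import List
from pydantic import BaseModel

class Reference(BaseModel):   # inferred shape; see backend/pydmodels.py
    id: str
    title: str
    source: str               # was `url` before this patch

class AIAnswer(BaseModel):    # inferred shape; see backend/pydmodels.py
    answer: str
    references: List[Reference]

refs = [
    Reference(id="1", title="SurfSense", source="https://github.com/MODSetter/SurfSense"),
    Reference(id="2", title="SurfSense repo", source="https://github.com/MODSetter/SurfSense"),
]
unique_refs = {}
id_mapping = {
    ref.id: unique_refs.setdefault(
        ref.source, Reference(id=str(len(unique_refs) + 1), title=ref.title, source=ref.source)
    ).id
    for ref in refs
}
assert id_mapping == {"1": "1", "2": "1"}  # both citations collapse to one entry
```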
- async def get_vectorstore_report(self, query: str, report_type: str, report_source: str, documents: List[Document]) -> str:
- researcher = GPTResearcher(query=query, report_type=report_type, report_source=report_source, documents=documents, report_format="IEEE")
+    async def ws_get_vectorstore_report(self, query: str, report_type: str, report_source: str, documents: List[Document], websocket: WebSocket) -> str:
+        researcher = GPTResearcher(query=query, report_type=report_type, report_source=report_source, documents=documents, report_format="APA", websocket=websocket)
await researcher.conduct_research()
report = await researcher.write_report()
return report
- async def get_web_report(self, query: str, report_type: str, report_source: str) -> str:
- researcher = GPTResearcher(query=query, report_type=report_type, report_source=report_source, report_format="IEEE")
+ async def ws_get_web_report(self, query: str, report_type: str, report_source: str, websocket: WebSocket) -> str:
+        researcher = GPTResearcher(query=query, report_type=report_type, report_source=report_source, report_format="APA", websocket=websocket)
await researcher.conduct_research()
report = await researcher.write_report()
return report
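Both report helpers now thread the client WebSocket into GPTResearcher, so research progress streams to the browser while the report is built; they also request APA output, which `ws_experimental_search` later converts to IEEE. A minimal await-site sketch, assuming an `HIndices` instance inside an active WebSocket handler (the `report_type` value is one of gpt_researcher's standard types, not something this patch defines):

```python
# Sketch only: `indices` and `websocket` come from the surrounding
# (hypothetical) FastAPI handler; the query string is illustrative.
report = await indices.ws_get_web_report(
    query="What is SurfSense?",
    report_type="research_report",  # a gpt_researcher report type
    report_source="web",
    websocket=websocket,
)
await websocket.send_text(report)
```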
- def new_search(self, query, search_space='GENERAL'):
- report_type = "custom_report"
- report_source = "langchain_documents"
- contextdocs = []
+    async def ws_experimental_search(self, websocket: WebSocket, manager: ConnectionManager, query, search_space='GENERAL', report_type="custom_report", report_source="langchain_documents"):
+ custom_prompt = """
+ Please answer the following user query using only the **Document Page Content** provided below, while citing sources exclusively from the **Document Metadata** section, in the format shown. **Do not add any external information.**
+
+ **USER QUERY:** """ + query + """
+
+ **Answer Requirements:**
+        - Provide a detailed, long-form response using IEEE-style in-text citations (e.g., [1], [2]) based solely on the **Document Page Content**.
+ - Use **Document Metadata** only for citation details and format each reference exactly once, with no duplicates.
+        - List references at the end of your response, each formatted exactly as: (Access Date and Time). [Title or Filename](Source)
+ FOR EXAMPLE:
+        EXAMPLE User Query: Explain the impact of artificial intelligence on modern healthcare.
+
+ EXAMPLE Given Documents:
+ =======================================DOCUMENT METADATA==================================== \n"
+ Source: https://www.reddit.com/r/ChatGPT/comments/13na8yp/highly_effective_prompt_for_summarizing_gpt4/ \n
+ Title: Artificial intelligence\n
+ Visited Date and Time : 2024-10-23T22:44:03-07:00 \n
+ ============================DOCUMENT PAGE CONTENT CHUNK===================================== \n
+ Page Content Chunk: \n\nArtificial intelligence (AI) has significantly transformed modern healthcare by enhancing diagnostic accuracy, personalizing patient care, and optimizing operational efficiency. AI algorithms can analyze vast datasets to identify patterns that may be missed by human practitioners, leading to improved diagnostic outcomes. \n\n
+ ===================================================================================== \n
+
+
+ =======================================DOCUMENT METADATA==================================== \n"
+ Source: https://github.com/MODSetter/SurfSense \n
+ Title: MODSetter/SurfSense: Personal AI Assistant for Internet Surfers and Researchers. \n
+ Visited Date and Time : 2024-10-23T22:44:03-07:00 \n
+ ============================DOCUMENT PAGE CONTENT CHUNK===================================== \n
+ Page Content Chunk: \n\nAI systems have been deployed in radiology to detect anomalies in medical imaging with high precision, reducing the risk of misdiagnosis and improving patient outcomes. Additionally, AI-powered chatbots and virtual assistants are being used to provide 24/7 support, answer queries, and offer personalized health advice\n\n
+ ===================================================================================== \n
+
+
+ =======================================DOCUMENT METADATA==================================== \n"
+ Source: https://github.com/MODSetter/SurfSense \n
+ Title: MODSetter/SurfSense: Personal AI Assistant for Internet Surfers and Researchers. \n
+ Visited Date and Time : 2024-10-23T22:44:03-07:00 \n
+ ============================DOCUMENT PAGE CONTENT CHUNK===================================== \n
+ Page Content Chunk: \n\nAI algorithms can analyze a patient's genetic information to predict their risk of certain diseases and recommend tailored treatment plans. \n\n
+ ===================================================================================== \n
+
+
+ =======================================DOCUMENT METADATA==================================== \n"
+ Source: filename.pdf \n
+ ============================DOCUMENT PAGE CONTENT CHUNK===================================== \n
+ Page Content Chunk: \n\nApart from diagnostics, AI-driven tools facilitate personalized treatment plans by considering individual patient data, thereby improving patient outcomes\n\n
+ ===================================================================================== \n
+
+
+
+        Ensure your response is structured like the following:
+ **OUTPUT FORMAT:**
+ ---
+ **Answer:**
+ Artificial intelligence (AI) has significantly transformed modern healthcare by enhancing diagnostic accuracy, personalizing patient care, and optimizing operational efficiency. AI algorithms can analyze vast datasets to identify patterns that may be missed by human practitioners, leading to improved diagnostic outcomes [1]. For instance, AI systems have been deployed in radiology to detect anomalies in medical imaging with high precision [2]. Moreover, AI-driven tools facilitate personalized treatment plans by considering individual patient data, thereby improving patient outcomes [3].
+
+ **References:**
+ 1. (2024, October 23). [Artificial intelligence — GPT-4 Optimized: r/ChatGPT](https://www.reddit.com/r/ChatGPT/comments/13na8yp/highly_effective_prompt_for_summarizing_gpt4)
+ 2. (2024, October 23). [MODSetter/SurfSense: Personal AI Assistant for Internet Surfers and Researchers](https://github.com/MODSetter/SurfSense)
+ 3. (2024, October 23). [filename.pdf](filename.pdf)
+
+ ---
+
+ """
+
+ structured_llm = self.llm.with_structured_output(AIAnswer)
+
+ if report_source == "web" :
+ if report_type == "custom_report" :
+ ret_report = await self.ws_get_web_report(query=custom_prompt, report_type=report_type, report_source="web", websocket=websocket)
+ else:
+ ret_report = await self.ws_get_web_report(
+ query=query,
+ report_type=report_type,
+ report_source="web",
+ websocket=websocket
+ )
+ await manager.send_personal_message(
+ json.dumps({"type": "stream", "content": "Converting to IEEE format..."}),
+ websocket
+ )
+ ret_report = self.llm.invoke("I have a report written in APA format. Please convert it to IEEE format, ensuring that all citations, references, headings, and overall formatting adhere to the IEEE style guidelines. Maintain the original content and structure while applying the correct IEEE formatting rules. Just return the converted report thats it. NOW MY REPORT : " + ret_report).content
+
+            for chunk in structured_llm.stream(
+ "Please extract and separate the references from the main text. "
+ "References are formatted as follows:"
+ "[Reference Id]. (Access Date and Time). [Title or Filename](Source or URL). "
+ "Provide the text and references as distinct outputs. "
+ "IMPORTANT : Never hallucinate the references. If there is no reference just return nothing in the reference field."
+ "Here is the content to process: \n\n\n" + ret_report):
+                # ans, sources = self.deduplicate_references_and_update_answer(answer=chunk.answer, references=chunk.references)
+
+ await manager.send_personal_message(
+ json.dumps({"type": "stream", "sources": [source.model_dump() for source in chuck.references]}),
+ websocket
+ )
+
+ await manager.send_personal_message(
+ json.dumps({"type": "stream", "content": ret_report}),
+ websocket
+ )
+
+ return
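For the web branch that ends here, every message the client receives is a JSON object of the form `{"type": "stream", ...}` carrying `sources`, `content`, or (in the vector-store branch below) `relateddocs`. A client-side consumption sketch, assuming a `/ws` route exposes `ws_experimental_search`; the route itself is not shown in this patch.

```python
# Illustrative consumer; requires `pip install websockets`. The /ws URL is
# an assumption — this patch only shows the server-side send calls.
import asyncio
import json

import websockets

async def consume(url: str = "ws://localhost:8000/ws"):
    async with websockets.connect(url) as ws:
        async for raw in ws:
            msg = json.loads(raw)
            if msg.get("type") != "stream":
                continue
            if "sources" in msg:
                print("references:", [s["title"] for s in msg["sources"]])
            elif "relateddocs" in msg:
                print("related docs:", len(msg["relateddocs"]))
            elif "content" in msg:
                print(msg["content"])

asyncio.run(consume())
```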
+
+ contextdocs = []
top_summaries_compressor = FlashrankRerank(top_n=5)
details_compressor = FlashrankRerank(top_n=50)
top_summaries_retreiver = ContextualCompressionRetriever(
@@ -383,6 +497,13 @@ class HIndices:
top_summaries_compressed_docs = top_summaries_retreiver.invoke(query)
+ rel_docs = filter_complex_metadata(top_summaries_compressed_docs)
+
+ await manager.send_personal_message(
+ json.dumps({"type": "stream", "relateddocs": [relateddoc.model_dump() for relateddoc in rel_docs]}, cls=NumpyEncoder),
+ websocket
+ )
+
for summary in top_summaries_compressed_docs:
# For each summary, retrieve relevant detailed chunks
page_number = summary.metadata["page"]
@@ -396,66 +517,45 @@ class HIndices:
)
contextdocs.extend(detailed_compressed_docs)
+
- custom_prompt = """
- Please answer the following user query in the format shown below, using in-text citations and IEEE-style references based on the provided documents.
- USER QUERY : """+ query +"""
-
- Ensure the answer includes:
- - A detailed yet concise explanation with IEEE-style in-text citations (e.g., [1], [2]).
- - A list of non-duplicated sources only from document's metadata not document's page content at the end, following IEEE format.
- - Where applicable, provide sources in the text to back up key points.
- - Reference should follow this format : (Access Date and Time). [Title or Filename](Source)
-
- FOR EXAMPLE:
- User Query : Explain the impact of artificial intelligence on modern healthcare.
-
- Given Documents:
- =======================================DOCUMENT METADATA==================================== \n"
- Source: https://www.reddit.com/r/ChatGPT/comments/13na8yp/highly_effective_prompt_for_summarizing_gpt4/ \n
- Title: Artificial intelligence\n
- Visited Date and Time : 2024-10-23T22:44:03-07:00 \n
- ============================DOCUMENT PAGE CONTENT CHUNK===================================== \n
- Page Content Chunk: \n\nArtificial intelligence (AI) has significantly transformed modern healthcare by enhancing diagnostic accuracy, personalizing patient care, and optimizing operational efficiency. AI algorithms can analyze vast datasets to identify patterns that may be missed by human practitioners, leading to improved diagnostic outcomes. \n\n
- ===================================================================================== \n
- =======================================DOCUMENT METADATA==================================== \n"
- Source: https://github.com/MODSetter/SurfSense \n
- Title: MODSetter/SurfSense: Personal AI Assistant for Internet Surfers and Researchers. \n
- Visited Date and Time : 2024-10-23T22:44:03-07:00 \n
- ============================DOCUMENT PAGE CONTENT CHUNK===================================== \n
- Page Content Chunk: \n\nAI systems have been deployed in radiology to detect anomalies in medical imaging with high precision, reducing the risk of misdiagnosis and improving patient outcomes. Additionally, AI-powered chatbots and virtual assistants are being used to provide 24/7 support, answer queries, and offer personalized health advice\n\n
- ===================================================================================== \n
- =======================================DOCUMENT METADATA==================================== \n"
- Source: filename.pdf \n
- ============================DOCUMENT PAGE CONTENT CHUNK===================================== \n
- Page Content Chunk: \n\nApart from diagnostics, AI-driven tools facilitate personalized treatment plans by considering individual patient data, thereby improving patient outcomes\n\n
- ===================================================================================== \n
+ if report_source == "langchain_documents":
+ if report_type == "custom_report":
+ ret_report = await self.ws_get_vectorstore_report(query=custom_prompt, report_type=report_type, report_source=report_source, documents=contextdocs, websocket=websocket)
+ else:
+ ret_report = await self.ws_get_vectorstore_report(query=query, report_type=report_type, report_source=report_source, documents=contextdocs, websocket=websocket)
+ await manager.send_personal_message(
+ json.dumps({"type": "stream", "content": "Converting to IEEE format..."}),
+ websocket
+ )
+ ret_report = self.llm.invoke("I have a report written in APA format. Please convert it to IEEE format, ensuring that all citations, references, headings, and overall formatting adhere to the IEEE style guidelines. Maintain the original content and structure while applying the correct IEEE formatting rules. Return only the converted report. NOW MY REPORT: " + ret_report).content
+
+
+ for chunk in structured_llm.stream(
+ "Please extract and separate the references from the main text. "
+ "References are formatted as follows: "
+ "[Reference Id]. (Access Date and Time). [Title or Filename](Source or URL). "
+ "Provide the text and references as distinct outputs. "
+ "Ensure that in-text citation markers such as [1], [2], (1), (2), as well as any in-text links or citation links within the content, remain unaltered and are accurately extracted. "
+ "IMPORTANT: Never hallucinate references. If there are no references, return nothing in the references field. "
+ "Here is the content to process: \n\n\n" + ret_report):
+ ans, sources = self.deduplicate_references_and_update_answer(answer=chunk.answer, references=chunk.references)
+
+ await manager.send_personal_message(
+ json.dumps({"type": "stream", "sources": [source.model_dump() for source in sources]}),
+ websocket
+ )
-
-
- Ensure your response is structured something like this:
- ---
- **Answer:**
- Artificial intelligence (AI) has significantly transformed modern healthcare by enhancing diagnostic accuracy, personalizing patient care, and optimizing operational efficiency. AI algorithms can analyze vast datasets to identify patterns that may be missed by human practitioners, leading to improved diagnostic outcomes [1]. For instance, AI systems have been deployed in radiology to detect anomalies in medical imaging with high precision [2]. Moreover, AI-driven tools facilitate personalized treatment plans by considering individual patient data, thereby improving patient outcomes [3].
+ await manager.send_personal_message(
+ json.dumps({"type": "stream", "content": ans}),
+ websocket
+ )
+
- **References:**
- 1. (2024, October 23). [Artificial intelligence — GPT-4 Optimized: r/ChatGPT.](https://www.reddit.com/r/ChatGPT/comments/13na8yp/highly_effective_prompt_for_summarizing_gpt4/)
- 2. (2024, October 23). [MODSetter/SurfSense: Personal AI Assistant for Internet Surfers and Researchers.](https://github.com/MODSetter/SurfSense)
- 3. (2024, October 23). [filename.pdf](filename.pdf)
-
- ---
-
- """
- local_report = asyncio.run(self.get_vectorstore_report(query=custom_prompt, report_type=report_type, report_source=report_source, documents=contextdocs))
+ return
- # web_report = asyncio.run(get_web_report(query=custom_prompt, report_type=report_type, report_source="web"))
-
- # structured_llm = self.llm.with_structured_output(AIAnswer)
-
- # out = structured_llm.invoke("Extract exact(i.e without changing) answer string and references information from : \n\n\n" + local_report)
-
- # mod_out = self.deduplicate_references_and_update_answer(answer=out.answer, references=out.references)
-
- return local_report
\ No newline at end of file
diff --git a/backend/models.py b/backend/models.py
index d6301f2..ba55c6c 100644
--- a/backend/models.py
+++ b/backend/models.py
@@ -1,7 +1,7 @@
from datetime import datetime
-from typing import List
+# from typing import List
from database import Base, engine
-from sqlalchemy import Column, DateTime, ForeignKey, Integer, String, create_engine
+from sqlalchemy import Column, DateTime, ForeignKey, Integer, String, Boolean, create_engine
from sqlalchemy.orm import relationship
class BaseModel(Base):
@@ -18,8 +18,8 @@ class Chat(BaseModel):
title = Column(String)
chats_list = Column(String)
- user_id = Column(ForeignKey('users.id'))
- user = relationship('User')
+ search_space_id = Column(Integer, ForeignKey('searchspaces.id'))
+ search_space = relationship('SearchSpace', back_populates='chats')
class Documents(BaseModel):
@@ -31,30 +31,50 @@ class Documents(BaseModel):
file_type = Column(String)
document_metadata = Column(String)
page_content = Column(String)
- desc_vector_start = Column(Integer, default=0)
- desc_vector_end = Column(Integer, default=0)
- search_space_id = Column(ForeignKey('searchspaces.id'))
- search_space = relationship('SearchSpace')
+ summary_vector_id = Column(String)
+
+ search_space_id = Column(Integer, ForeignKey("searchspaces.id"))
+ search_space = relationship("SearchSpace", back_populates="documents")
+
+
+class Podcast(BaseModel):
+ __tablename__ = "podcasts"
+
+ title = Column(String)
+ created_at = Column(DateTime, default=datetime.now)
+ is_generated = Column(Boolean, default=False)
+ podcast_content = Column(String, default="")
+ file_location = Column(String, default="")
+
+ search_space_id = Column(Integer, ForeignKey("searchspaces.id"))
+ search_space = relationship("SearchSpace", back_populates="podcasts")
+
- user_id = Column(ForeignKey('users.id'))
- user = relationship('User')
class SearchSpace(BaseModel):
__tablename__ = "searchspaces"
- search_space = Column(String, unique=True)
+ name = Column(String, index=True)
+ description = Column(String)
+ created_at = Column(DateTime, default=datetime.now)
- documents = relationship(Documents)
+ user_id = Column(Integer, ForeignKey("users.id"))
+ user = relationship("User", back_populates="search_spaces")
+
+ documents = relationship("Documents", back_populates="search_space", order_by="Documents.id")
+ podcasts = relationship("Podcast", back_populates="search_space", order_by="Podcast.id")
+
+ chats = relationship('Chat', back_populates='search_space', order_by='Chat.id')
class User(BaseModel):
__tablename__ = "users"
username = Column(String, unique=True, index=True)
hashed_password = Column(String)
- chats = relationship(Chat, order_by="Chat.id")
- documents = relationship(Documents, order_by="Documents.id")
+
+ search_spaces = relationship("SearchSpace", back_populates="user")
# Create the database tables if they don't exist
-User.metadata.create_all(bind=engine)
+User.metadata.create_all(bind=engine)
\ No newline at end of file
diff --git a/backend/prompts.py b/backend/prompts.py
index c97577e..bc4099a 100644
--- a/backend/prompts.py
+++ b/backend/prompts.py
@@ -1,75 +1,6 @@
-from langchain_core.prompts.prompt import PromptTemplate
+# TODO: Move the new prompts into this module after further testing
+
+# from langchain_core.prompts.prompt import PromptTemplate
from datetime import datetime, timezone
-
-
-
-DATE_TODAY = "Today's date is " + datetime.now(timezone.utc).astimezone().isoformat() + '\n'
-
-# Create a prompt template for sub-query decomposition
-SUBQUERY_DECOMPOSITION_TEMPLATE = DATE_TODAY + """You are an AI assistant tasked with breaking down complex queries into simpler sub-queries for a vector store.
-Given the original query, decompose it into 2-4 simpler sub-queries for vector search that helps in expanding context.
-
-Original query: {original_query}
-
-IMPORTANT INSTRUCTION: Make sure to only return sub-queries and no explanation.
-
-EXAMPLE:
-
-User Query: What are the impacts of climate change on the environment?
-
-AI Answer:
-What are the impacts of climate change on biodiversity?
-How does climate change affect the oceans?
-What are the effects of climate change on agriculture?
-What are the impacts of climate change on human health?
-"""
-
-# SUBQUERY_DECOMPOSITION_TEMPLATE_TWO = DATE_TODAY + """You are an AI language model assistant. Your task is to generate five
-# different versions of the given user question to retrieve relevant documents from a vector
-# database. By generating multiple perspectives on the user question, your goal is to help
-# the user overcome some of the limitations of the distance-based similarity search.
-# Provide these alternative questions separated by newlines.
-# Original question: {original_query}"""
-
-
-SUBQUERY_DECOMPOSITION_PROMT = PromptTemplate(
- input_variables=["original_query"],
- template=SUBQUERY_DECOMPOSITION_TEMPLATE
-)
-
-CONTEXT_ANSWER_TEMPLATE = DATE_TODAY + """You are a phd in english litrature. You are given the task to give detailed research report and explanation to the user query based on the given context.
-
-IMPORTANT INSTRUCTION: Only return answer if you can find it in given context otherwise just say you don't know.
-
-Context: {context}
-
-User Query: {query}
-Detailed Report:"""
-
-ANSWER_WITH_CITATIONS = DATE_TODAY + """You're a helpful AI assistant. Given a user question and some Webpage article snippets, \
-answer the user question and provide citations. If none of the articles answer the question, just say you don't know.
-
-Remember, you must return both an answer and citations. Citation information is given in Document Metadata.
-
-Here are the Webpage article snippets:
-{context}
-
-User Query: {query}
-Your Answer:"""
-
-
-CONTEXT_ANSWER_PROMPT = PromptTemplate(
- input_variables=["context","query"],
- template=ANSWER_WITH_CITATIONS
-)
-
-
-
-
-
-
-
-
-
-
+DATE_TODAY = "Today's date is " + datetime.now(timezone.utc).astimezone().isoformat() + '\n'
\ No newline at end of file
diff --git a/backend/pydmodels.py b/backend/pydmodels.py
index c95e082..14000bf 100644
--- a/backend/pydmodels.py
+++ b/backend/pydmodels.py
@@ -1,3 +1,4 @@
+# NOTE: This module still contains unused models; clean-up planned
from pydantic import BaseModel, Field
from typing import List, Optional
@@ -24,15 +25,28 @@ class DocMeta(BaseModel):
# VisitedWebPageContent: Optional[str] = Field(default=None, description="Visited WebPage Content in markdown of Document")
+class CreatePodcast(BaseModel):
+ token: str
+ search_space_id: int
+ title: str
+ wordcount: int
+ podcast_content: str
+
+
+class CreateStorageSpace(BaseModel):
+ name: str
+ description: str
+ token: str
+
class Reference(BaseModel):
id: str = Field(..., description="reference no")
- title: str = Field(..., description="reference title")
- url: str = Field(..., description="reference url")
+ title: str = Field(..., description="Reference title.")
+ source: str = Field(..., description="Reference source or URL. Prefer the URL; only include a file name when no URL is available.")
class AIAnswer(BaseModel):
- answer: str = Field(..., description="Given Answer including its intext citation no's like [1], [2] etc.")
+ answer: str = Field(..., description="The provided answer, excluding references, but including in-text citation numbers such as [1], [2], (1), (2), etc.")
references: List[Reference] = Field(..., description="References")
@@ -42,13 +56,16 @@ class DocWithContent(BaseModel):
class DocumentsToDelete(BaseModel):
ids_to_delete: List[str]
- openaikey: str
token: str
class UserQuery(BaseModel):
query: str
search_space: str
- openaikey: str
+ token: str
+
+class MainUserQuery(BaseModel):
+ query: str
+ search_space: str
token: str
class ChatHistory(BaseModel):
@@ -58,7 +75,6 @@ class ChatHistory(BaseModel):
class UserQueryWithChatHistory(BaseModel):
chat: List[ChatHistory]
query: str
- openaikey: str
token: str
class DescriptionResponse(BaseModel):
@@ -70,14 +86,18 @@ class RetrivedDocListItem(BaseModel):
class RetrivedDocList(BaseModel):
documents: List[RetrivedDocListItem]
- search_space: str | None
- openaikey: str
+ search_space_id: int
token: str
class UserQueryResponse(BaseModel):
response: str
relateddocs: List[DocWithContent]
+class NewUserQueryResponse(BaseModel):
+ response: str
+ sources: List[Reference]
+ relateddocs: List[DocWithContent]
+
class NewUserChat(BaseModel):
token: str
type: str
diff --git a/backend/requirements.txt b/backend/requirements.txt
index 3f4438b..598613e 100644
--- a/backend/requirements.txt
+++ b/backend/requirements.txt
@@ -24,4 +24,7 @@ gpt_researcher
langgraph-cli
weasyprint
json5
-loguru
\ No newline at end of file
+loguru
+ffmpeg
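+# NOTE: podcastfy also expects the ffmpeg system binary to be available on PATH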
+podcastfy
+wsproto
\ No newline at end of file
diff --git a/backend/server.py b/backend/server.py
index f8f5b8b..02f9a28 100644
--- a/backend/server.py
+++ b/backend/server.py
@@ -1,27 +1,32 @@
from __future__ import annotations
+import asyncio
+import json
+from typing import List
+
from langchain_core.prompts import ChatPromptTemplate
from langchain_core.documents import Document
from langchain_ollama import OllamaLLM
from langchain_openai import ChatOpenAI
from sqlalchemy import insert
-from prompts import DATE_TODAY
-from pydmodels import ChatToUpdate, DescriptionResponse, DocWithContent, DocumentsToDelete, NewUserChat, UserCreate, UserQuery, RetrivedDocList, UserQueryResponse, UserQueryWithChatHistory
from langchain_core.messages import HumanMessage, SystemMessage, AIMessage
from langchain_unstructured import UnstructuredLoader
-#Heirerical Indices class
-from HIndices import HIndices
+# OUR LIBS
+from HIndices import ConnectionManager, HIndices
from Utils.stringify import stringify
+from prompts import DATE_TODAY
+from pydmodels import ChatToUpdate, CreatePodcast, CreateStorageSpace, DescriptionResponse, DocWithContent, DocumentsToDelete, MainUserQuery, NewUserChat, NewUserQueryResponse, UserCreate, UserQuery, RetrivedDocList, UserQueryResponse, UserQueryWithChatHistory
+from podcastfy.client import generate_podcast
# Auth Libs
-from fastapi import FastAPI, Depends, Form, HTTPException, status, UploadFile
+from fastapi import FastAPI, Depends, Form, HTTPException, Response, WebSocket, status, UploadFile, BackgroundTasks
from sqlalchemy.orm import Session
from fastapi.security import OAuth2PasswordBearer, OAuth2PasswordRequestForm
from jose import JWTError, jwt
from datetime import datetime, timedelta
from passlib.context import CryptContext
-from models import Chat, Documents, SearchSpace, User
+from models import Chat, Documents, Podcast, SearchSpace, User
from database import SessionLocal
from fastapi.middleware.cors import CORSMiddleware
@@ -30,6 +35,7 @@ import os
from dotenv import load_dotenv
load_dotenv()
+OPENAI_API_KEY = os.environ.get("OPENAI_API_KEY")
SMART_LLM = os.environ.get("SMART_LLM")
IS_LOCAL_SETUP = True if SMART_LLM.startswith("ollama") else False
ACCESS_TOKEN_EXPIRE_MINUTES = int(os.environ.get("ACCESS_TOKEN_EXPIRE_MINUTES"))
@@ -45,6 +51,7 @@ def extract_model_name(model_string: str) -> tuple[str, str]:
MODEL_NAME = extract_model_name(SMART_LLM)
app = FastAPI()
+manager = ConnectionManager()
# Dependency
def get_db():
@@ -53,315 +60,9 @@ def get_db():
yield db
finally:
db.close()
-
+
oauth2_scheme = OAuth2PasswordBearer(tokenUrl="token")
-
-@app.post("/uploadfiles/")
-async def upload_files(files: list[UploadFile], token: str = Depends(oauth2_scheme), search_space: str = Form(...), api_key: str = Form(...), db: Session = Depends(get_db)):
- try:
- # Decode and verify the token
- payload = jwt.decode(token, SECRET_KEY, algorithms=[ALGORITHM])
- username: str = payload.get("sub")
- if username is None:
- raise HTTPException(status_code=403, detail="Token is invalid or expired")
-
- docs = []
-
- for file in files:
-
- loader = UnstructuredLoader(
- file=file.file,
- api_key=UNSTRUCTURED_API_KEY,
- partition_via_api=True,
- chunking_strategy="basic",
- max_characters=90000,
- include_orig_elements=False,
- )
-
- filedocs = loader.load()
-
- fileswithfilename = []
- for f in filedocs:
- temp = f
- temp.metadata['filename'] = file.filename
- fileswithfilename.append(temp)
-
- docs.extend(fileswithfilename)
-
- # Initialize containers for documents and entries
- DocumentPgEntry = []
- raw_documents = []
-
- # Fetch the search space from the database or create it if it doesn't exist
- searchspace = db.query(SearchSpace).filter(SearchSpace.search_space == search_space.upper()).first()
- if not searchspace:
- stmt = insert(SearchSpace).values(search_space=search_space.upper())
- db.execute(stmt)
- db.commit()
-
- # Process each document in the retrieved document list
- for doc in docs:
-
- raw_documents.append(Document(page_content=doc.page_content, metadata=doc.metadata))
-
- # Stringify the document metadata
- pgdocmeta = stringify(doc.metadata)
-
- DocumentPgEntry.append(Documents(
- file_type=doc.metadata['filetype'],
- title=doc.metadata['filename'],
- search_space=db.query(SearchSpace).filter(SearchSpace.search_space == search_space.upper()).first(),
- document_metadata=pgdocmeta,
- page_content=doc.page_content
- ))
-
-
- # Save documents in PostgreSQL
- user = db.query(User).filter(User.username == username).first()
- user.documents.extend(DocumentPgEntry)
- db.commit()
-
- # Create hierarchical indices
- if IS_LOCAL_SETUP == True:
- index = HIndices(username=username)
- else:
- index = HIndices(username=username, api_key=api_key)
-
- # Save indices in vector stores
- index.encode_docs_hierarchical(documents=raw_documents, files_type='OTHER', search_space=search_space.upper(), db=db)
-
- print("FINISHED")
-
- return {
- "message": "Files Uploaded Successfully"
- }
-
- except JWTError:
- raise HTTPException(status_code=403, detail="Token is invalid or expired")
-
-@app.post("/chat/")
-def get_user_query_response(data: UserQuery, response_model=UserQueryResponse):
- try:
- payload = jwt.decode(data.token, SECRET_KEY, algorithms=[ALGORITHM])
- username: str = payload.get("sub")
- if username is None:
- raise HTTPException(status_code=403, detail="Token is invalid or expired")
-
- query = data.query
- search_space = data.search_space
-
-
- # Create Heirarical Indecices
- if(IS_LOCAL_SETUP == True):
- index = HIndices(username=username)
- else:
- index = HIndices(username=username,api_key=data.openaikey)
-
- #Implement HyDe over it if you crazy
- sub_queries = []
- sub_queries.append(query)
-
- duplicate_related_summary_docs = []
- for sub_query in sub_queries:
- # I know this is not the best way to do it, but I am too lazy to change it now
- related_summary_docs = index.summary_vector_search(query=sub_query, search_space=search_space)
- duplicate_related_summary_docs.extend(related_summary_docs)
-
-
- combined_docs_seen_metadata = set()
- combined_docs_unique_documents = []
-
- for doc in duplicate_related_summary_docs:
- # Convert metadata to a tuple of its items (this allows it to be added to a set)
- doc.metadata['relevance_score'] = 0.0
- metadata_tuple = tuple(sorted(doc.metadata.items()))
- if metadata_tuple not in combined_docs_seen_metadata:
- combined_docs_seen_metadata.add(metadata_tuple)
- combined_docs_unique_documents.append(doc)
-
- returnDocs = []
- for doc in combined_docs_unique_documents:
- entry = DocWithContent(
- DocMetadata=stringify(doc.metadata),
- Content=doc.page_content
- )
-
- returnDocs.append(entry)
-
-
-
- finalans = index.new_search(query=query, search_space=search_space)
-
- return UserQueryResponse(response=finalans, relateddocs=returnDocs)
-
-
- except JWTError:
- raise HTTPException(status_code=403, detail="Token is invalid or expired")
-
-# SAVE DOCS
-@app.post("/save/")
-def save_data(apires: RetrivedDocList, db: Session = Depends(get_db)):
- """
- Save retrieved documents to the database and encode them for hierarchical indexing.
-
- This endpoint processes the provided documents, saves related information
- in the PostgreSQL database, and updates hierarchical indices for the user.
- Args:
- apires (RetrivedDocList): The list of retrieved documents with metadata.
- db (Session, optional): Dependency-injected session for database operations.
-
- Returns:
- dict: A message indicating the success of the operation.
-
- Raises:
- HTTPException: If the token is invalid or expired.
- """
- try:
- # Decode token and extract username
- payload = jwt.decode(apires.token, SECRET_KEY, algorithms=[ALGORITHM])
- username: str = payload.get("sub")
- if username is None:
- raise HTTPException(status_code=403, detail="Token is invalid or expired")
-
- print("STARTED")
-
- # Initialize containers for documents and entries
- DocumentPgEntry = []
- raw_documents = []
-
- # Fetch the search space from the database
- searchspace = db.query(SearchSpace).filter(SearchSpace.search_space == apires.search_space.upper()).first()
- if not searchspace:
- stmt = insert(SearchSpace).values(search_space=apires.search_space.upper())
- db.execute(stmt)
- db.commit()
-
- # Process each document in the retrieved document list
- for doc in apires.documents:
- # Construct document content
- content = (
- f"USER BROWSING SESSION EVENT: \n"
- f"=======================================METADATA==================================== \n"
- f"User Browsing Session ID : {doc.metadata.BrowsingSessionId} \n"
- f"User Visited website with url : {doc.metadata.VisitedWebPageURL} \n"
- f"This visited website url had title : {doc.metadata.VisitedWebPageTitle} \n"
- f"User Visited this website from referring url : {doc.metadata.VisitedWebPageReffererURL} \n"
- f"User Visited this website url at this Date and Time : {doc.metadata.VisitedWebPageDateWithTimeInISOString} \n"
- f"User Visited this website for : {str(doc.metadata.VisitedWebPageVisitDurationInMilliseconds)} milliseconds. \n"
- f"===================================================================================== \n"
- f"Webpage Content of the visited webpage url in markdown format : \n\n{doc.pageContent}\n\n"
- f"===================================================================================== \n"
- )
- raw_documents.append(Document(page_content=content, metadata=doc.metadata.__dict__))
-
- # Stringify the document metadata
- pgdocmeta = stringify(doc.metadata.__dict__)
-
- DocumentPgEntry.append(Documents(
- file_type='WEBPAGE',
- title=doc.metadata.VisitedWebPageTitle,
- search_space=searchspace,
- document_metadata=pgdocmeta,
- page_content=content
- ))
-
- # Save documents in PostgreSQL
- user = db.query(User).filter(User.username == username).first()
- user.documents.extend(DocumentPgEntry)
- db.commit()
-
- # Create hierarchical indices
- if IS_LOCAL_SETUP == True:
- index = HIndices(username=username)
- else:
- index = HIndices(username=username, api_key=apires.openaikey)
-
- # Save indices in vector stores
- index.encode_docs_hierarchical(documents=raw_documents, files_type='WEBPAGE', search_space=apires.search_space.upper(), db=db)
-
- print("FINISHED")
-
- return {
- "success": "Graph Will be populated Shortly"
- }
-
- except JWTError:
- raise HTTPException(status_code=403, detail="Token is invalid or expired")
-
-# Multi DOC Chat
-@app.post("/chat/docs")
-def doc_chat_with_history(data: UserQueryWithChatHistory, response_model=DescriptionResponse):
- try:
- payload = jwt.decode(data.token, SECRET_KEY, algorithms=[ALGORITHM])
- username: str = payload.get("sub")
- if username is None:
- raise HTTPException(status_code=403, detail="Token is invalid or expired")
-
- if(IS_LOCAL_SETUP == True):
- llm = OllamaLLM(model=MODEL_NAME,temperature=0)
- else:
- llm = ChatOpenAI(temperature=0, model_name=MODEL_NAME, api_key=data.openaikey)
-
- chatHistory = []
-
- for chat in data.chat:
- if(chat.type == 'system'):
- chatHistory.append(SystemMessage(content=DATE_TODAY + """You are an helpful assistant for question-answering tasks.
- Use the following pieces of retrieved context to answer the question.
- If you don't know the answer, just say that you don't know.
- Context:""" + str(chat.content)))
-
- if(chat.type == 'ai'):
- chatHistory.append(AIMessage(content=chat.content))
-
- if(chat.type == 'human'):
- chatHistory.append(HumanMessage(content=chat.content))
-
- chatHistory.append(("human", "{input}"));
-
-
- qa_prompt = ChatPromptTemplate.from_messages(chatHistory)
-
- descriptionchain = qa_prompt | llm
-
- response = descriptionchain.invoke({"input": data.query})
-
- if(IS_LOCAL_SETUP == True):
- return DescriptionResponse(response=response)
- else:
- return DescriptionResponse(response=response.content)
-
- except JWTError:
- raise HTTPException(status_code=403, detail="Token is invalid or expired")
-
-
- # Multi DOC Chat
-
-@app.post("/delete/docs")
-def delete_all_related_data(data: DocumentsToDelete, db: Session = Depends(get_db)):
- try:
- payload = jwt.decode(data.token, SECRET_KEY, algorithms=[ALGORITHM])
- username: str = payload.get("sub")
- if username is None:
- raise HTTPException(status_code=403, detail="Token is invalid or expired")
-
- if(IS_LOCAL_SETUP == True):
- index = HIndices(username=username)
- else:
- index = HIndices(username=username,api_key=data.openaikey)
-
- message = index.delete_vector_stores(summary_ids_to_delete=data.ids_to_delete,db=db )
-
- return {
- "message": message
- }
-
- except JWTError:
- raise HTTPException(status_code=403, detail="Token is invalid or expired")
-
-
-
# Manual Origins
# origins = [
@@ -393,11 +94,11 @@ def create_user(db: Session, user: UserCreate):
def register_user(user: UserCreate, db: Session = Depends(get_db)):
if(user.apisecretkey != API_SECRET_KEY):
raise HTTPException(status_code=401, detail="Unauthorized")
-
+
db_user = get_user_by_username(db, username=user.username)
if db_user:
raise HTTPException(status_code=400, detail="Username already registered")
-
+
del user.apisecretkey
return create_user(db=db, user=user)
@@ -451,100 +152,730 @@ async def verify_user_token(token: str):
verify_token(token=token)
return {"message": "Token is valid"}
-@app.post("/user/chat/save")
-def populate_user_chat(chat: NewUserChat, db: Session = Depends(get_db)):
+
+@app.post("/searchspace/{search_space_id}/chat/create")
+def create_chat_in_searchspace(chat: NewUserChat, search_space_id: int, db: Session = Depends(get_db)):
try:
payload = jwt.decode(chat.token, SECRET_KEY, algorithms=[ALGORITHM])
username: str = payload.get("sub")
if username is None:
raise HTTPException(status_code=403, detail="Token is invalid or expired")
-
- user = db.query(User).filter(User.username == username).first()
- newchat = Chat(type=chat.type, title=chat.title, chats_list=chat.chats_list)
-
- user.chats.append(newchat)
+
+ search_space = db.query(SearchSpace).filter(
+ SearchSpace.id == search_space_id,
+ SearchSpace.user_id == db.query(User).filter(User.username == username).first().id
+ ).first()
+
+ if not search_space:
+ raise HTTPException(status_code=404, detail="SearchSpace not found or does not belong to the user")
+
+ new_chat = Chat(type=chat.type, title=chat.title, chats_list=chat.chats_list)
+
+ search_space.chats.append(new_chat)
+
db.commit()
- return {
- "message": "Chat Saved"
- }
+ db.refresh(new_chat)
+
+ return {"chat_id": new_chat.id}
+
except JWTError:
raise HTTPException(status_code=403, detail="Token is invalid or expired")
-@app.post("/user/chat/update")
-def populate_user_chat(chat: ChatToUpdate, db: Session = Depends(get_db)):
+@app.post("/searchspace/{search_space_id}/chat/update")
+def update_chat_in_searchspace(chat: ChatToUpdate, search_space_id: int, db: Session = Depends(get_db)):
try:
payload = jwt.decode(chat.token, SECRET_KEY, algorithms=[ALGORITHM])
username: str = payload.get("sub")
if username is None:
raise HTTPException(status_code=403, detail="Token is invalid or expired")
-
- chatindb = db.query(Chat).filter(Chat.id == chat.chatid).first()
+
+ chatindb = db.query(Chat).join(SearchSpace).filter(
+ Chat.id == chat.chatid,
+ SearchSpace.id == search_space_id,
+ SearchSpace.user_id == db.query(User).filter(User.username == username).first().id
+ ).first()
+
+ if not chatindb:
+ raise HTTPException(status_code=404, detail="Chat not found or does not belong to the searchspace owned by the user")
+
chatindb.chats_list = chat.chats_list
-
db.commit()
- return {
- "message": "Chat Updated"
- }
+ return {"message": "Chat Updated"}
except JWTError:
raise HTTPException(status_code=403, detail="Token is invalid or expired")
-@app.get("/user/chat/delete/{token}/{chatid}")
-async def delete_chat_of_user(token: str, chatid: str, db: Session = Depends(get_db)):
+@app.get("/searchspace/{search_space_id}/chat/delete/{token}/{chatid}")
+async def delete_chat_in_searchspace(token: str, search_space_id: int, chatid: str, db: Session = Depends(get_db)):
try:
payload = jwt.decode(token, SECRET_KEY, algorithms=[ALGORITHM])
username: str = payload.get("sub")
if username is None:
raise HTTPException(status_code=403, detail="Token is invalid or expired")
-
- chatindb = db.query(Chat).filter(Chat.id == chatid).first()
+
+ chatindb = db.query(Chat).join(SearchSpace).filter(
+ Chat.id == chatid,
+ SearchSpace.id == search_space_id,
+ SearchSpace.user_id == db.query(User).filter(User.username == username).first().id
+ ).first()
+
+ if not chatindb:
+ raise HTTPException(status_code=404, detail="Chat not found or does not belong to the searchspace owned by the user")
+
db.delete(chatindb)
db.commit()
- return {
- "message": "Chat Deleted"
- }
+ return {"message": "Chat Deleted"}
except JWTError:
raise HTTPException(status_code=403, detail="Token is invalid or expired")
-
-#Gets user id & name
-@app.get("/user/{token}")
-async def get_user_with_token(token: str, db: Session = Depends(get_db)):
+
+@app.get("/searchspace/{search_space_id}/chat/{token}/{chatid}")
+def get_chat_by_id_in_searchspace(chatid: int, search_space_id: int, token: str, db: Session = Depends(get_db)):
try:
payload = jwt.decode(token, SECRET_KEY, algorithms=[ALGORITHM])
username: str = payload.get("sub")
if username is None:
raise HTTPException(status_code=403, detail="Token is invalid or expired")
-
+
+ chat = db.query(Chat).join(SearchSpace).filter(
+ Chat.id == chatid,
+ SearchSpace.id == search_space_id,
+ SearchSpace.user_id == db.query(User).filter(User.username == username).first().id
+ ).first()
+
+ if not chat:
+ raise HTTPException(status_code=404, detail="Chat not found or does not belong to the searchspace owned by the user")
+
+ return chat
+ except JWTError:
+ raise HTTPException(status_code=403, detail="Token is invalid or expired")
+
+@app.get("/searchspace/{search_space_id}/chats/{token}")
+def get_chats_in_searchspace(search_space_id: int, token: str, db: Session = Depends(get_db)):
+ try:
+ payload = jwt.decode(token, SECRET_KEY, algorithms=[ALGORITHM])
+ username: str = payload.get("sub")
+ if username is None:
+ raise HTTPException(status_code=403, detail="Token is invalid or expired")
+
user = db.query(User).filter(User.username == username).first()
- return {
- "userid": user.id,
- "username": user.username,
- "chats": user.chats,
- "documents": user.documents
- }
+ if not user:
+ raise HTTPException(status_code=404, detail="User not found")
+
+ # Filter chats that are specifically in the given search space
+ chats = db.query(Chat).filter(
+ Chat.search_space_id == search_space_id,
+ SearchSpace.user_id == user.id
+ ).join(SearchSpace).all()
+
+ return chats
+
except JWTError:
raise HTTPException(status_code=403, detail="Token is invalid or expired")
-@app.get("/searchspaces/{token}")
-async def get_user_with_token(token: str, db: Session = Depends(get_db)):
+
+
+
+@app.get("/user/{token}/searchspace/{search_space_id}/documents/")
+def get_user_documents(search_space_id: int, token: str, db: Session = Depends(get_db)):
+ try:
+ # Decode the token to get the username
+ payload = jwt.decode(token, SECRET_KEY, algorithms=[ALGORITHM])
+ username: str = payload.get("sub")
+ if username is None:
+ raise HTTPException(status_code=403, detail="Token is invalid or expired")
+
+ # Get the user by username and ensure they exist
+ user = db.query(User).filter(User.username == username).first()
+ if not user:
+ raise HTTPException(status_code=404, detail="User not found")
+
+ # Verify the search space belongs to the user
+ search_space = db.query(SearchSpace).filter(
+ SearchSpace.id == search_space_id,
+ SearchSpace.user_id == user.id
+ ).first()
+ if not search_space:
+ raise HTTPException(status_code=404, detail="Search space not found or does not belong to the user")
+
+ # Retrieve documents associated with the search space
+ return db.query(Documents).filter(Documents.search_space_id == search_space_id).all()
+
+ except JWTError:
+ raise HTTPException(status_code=403, detail="Token is invalid or expired")
+
+@app.get("/user/{token}/searchspace/{search_space_id}/")
+def get_user_search_space_by_id(search_space_id: int, token: str, db: Session = Depends(get_db)):
try:
payload = jwt.decode(token, SECRET_KEY, algorithms=[ALGORITHM])
username: str = payload.get("sub")
if username is None:
raise HTTPException(status_code=403, detail="Token is invalid or expired")
-
- search_spaces = db.query(SearchSpace).all()
- return {
- "search_spaces": search_spaces
- }
+
+ # Get the user by username
+ user = db.query(User).filter(User.username == username).first()
+ if not user:
+ raise HTTPException(status_code=404, detail="User not found")
+
+ # Get the search space by ID and verify it belongs to this user
+ search_space = db.query(SearchSpace).filter(
+ SearchSpace.id == search_space_id,
+ SearchSpace.user_id == user.id
+ ).first()
+ if not search_space:
+ raise HTTPException(status_code=404, detail="Search space not found or does not belong to the user")
+
+ return search_space
except JWTError:
raise HTTPException(status_code=403, detail="Token is invalid or expired")
-
+@app.get("/user/{token}/searchspaces/")
+def get_user_search_spaces(token: str, db: Session = Depends(get_db)):
+ try:
+ payload = jwt.decode(token, SECRET_KEY, algorithms=[ALGORITHM])
+ username: str = payload.get("sub")
+ if username is None:
+ raise HTTPException(status_code=403, detail="Token is invalid or expired")
+
+ user = db.query(User).filter(User.username == username).first()
+
+ return db.query(SearchSpace).filter(SearchSpace.user_id == user.id).all()
+ except JWTError:
+ raise HTTPException(status_code=403, detail="Token is invalid or expired")
+
+@app.post("/user/create/searchspace/")
+def create_user_search_space(data: CreateStorageSpace, db: Session = Depends(get_db)):
+ try:
+ payload = jwt.decode(data.token, SECRET_KEY, algorithms=[ALGORITHM])
+ username: str = payload.get("sub")
+ if username is None:
+ raise HTTPException(status_code=403, detail="Token is invalid or expired")
+
+ user = db.query(User).filter(User.username == username).first()
+
+ db_search_space = SearchSpace(user_id=user.id, name=data.name, description=data.description)
+ db.add(db_search_space)
+ db.commit()
+ db.refresh(db_search_space)
+ return db_search_space
+ except JWTError:
+ raise HTTPException(status_code=403, detail="Token is invalid or expired")
+
+@app.post("/user/save/")
+def save_user_extension_documents(data: RetrivedDocList, db: Session = Depends(get_db)):
+ try:
+ payload = jwt.decode(data.token, SECRET_KEY, algorithms=[ALGORITHM])
+ username: str = payload.get("sub")
+ if username is None:
+ raise HTTPException(status_code=403, detail="Token is invalid or expired")
+
+ # Get the user by username and ensure they exist
+ user = db.query(User).filter(User.username == username).first()
+ if not user:
+ raise HTTPException(status_code=404, detail="User not found")
+
+ # Verify the search space belongs to the user
+ search_space = db.query(SearchSpace).filter(
+ SearchSpace.id == data.search_space_id,
+ SearchSpace.user_id == user.id
+ ).first()
+ if not search_space:
+ raise HTTPException(status_code=404, detail="Search space not found or does not belong to the user")
+
+
+ # all_search_space_docs = db.query(SearchSpace).filter(
+ # SearchSpace.user_id == user.id
+ # ).all()
+
+ # total_doc_count = 0
+ # for search_space in all_search_space_docs:
+ # total_doc_count += db.query(Documents).filter(Documents.search_space_id == search_space.id).count()
+
+ print("STARTED")
+
+ # Initialize containers for documents and entries
+ # DocumentPgEntry = []
+ raw_documents = []
+
+ # Process each document in the retrieved document list
+ for doc in data.documents:
+ # Construct document content
+ content = (
+ f"USER BROWSING SESSION EVENT: \n"
+ f"=======================================METADATA==================================== \n"
+ f"User Browsing Session ID : {doc.metadata.BrowsingSessionId} \n"
+ f"User Visited website with url : {doc.metadata.VisitedWebPageURL} \n"
+ f"This visited website url had title : {doc.metadata.VisitedWebPageTitle} \n"
+ f"User Visited this website from referring url : {doc.metadata.VisitedWebPageReffererURL} \n"
+ f"User Visited this website url at this Date and Time : {doc.metadata.VisitedWebPageDateWithTimeInISOString} \n"
+ f"User Visited this website for : {str(doc.metadata.VisitedWebPageVisitDurationInMilliseconds)} milliseconds. \n"
+ f"===================================================================================== \n"
+ f"Webpage Content of the visited webpage url in markdown format : \n\n{doc.pageContent}\n\n"
+ f"===================================================================================== \n"
+ )
+ raw_documents.append(Document(page_content=content, metadata=doc.metadata.__dict__))
+
+ # pgdocmeta = stringify(doc.metadata.__dict__)
+
+ # DocumentPgEntry.append(Documents(
+ # file_type='WEBPAGE',
+ # title=doc.metadata.VisitedWebPageTitle,
+ # search_space=search_space,
+ # document_metadata=pgdocmeta,
+ # page_content=content
+ # ))
+
+ # # Save documents in PostgreSQL
+ # search_space.documents.extend(DocumentPgEntry)
+ # db.commit()
+
+ # Create hierarchical indices
+ if IS_LOCAL_SETUP:
+ index = HIndices(username=username)
+ else:
+ index = HIndices(username=username, api_key=OPENAI_API_KEY)
+
+ # Save indices in vector stores
+ index.encode_docs_hierarchical(
+ documents=raw_documents,
+ search_space_instance=search_space,
+ files_type='WEBPAGE',
+ db=db
+ )
+
+ print("FINISHED")
+
+ return {
+ "success": "Save Job Completed Successfully"
+ }
+
+
+ except JWTError:
+ raise HTTPException(status_code=403, detail="Token is invalid or expired")
+
+@app.post("/user/uploadfiles/")
+def save_user_documents(files: list[UploadFile], token: str = Depends(oauth2_scheme), search_space_id: int = Form(...), db: Session = Depends(get_db)):
+ try:
+ # Decode and verify the token
+ payload = jwt.decode(token, SECRET_KEY, algorithms=[ALGORITHM])
+ username: str = payload.get("sub")
+ if username is None:
+ raise HTTPException(status_code=403, detail="Token is invalid or expired")
+
+ # Get the user by username and ensure they exist
+ user = db.query(User).filter(User.username == username).first()
+ if not user:
+ raise HTTPException(status_code=404, detail="User not found")
+
+ # Verify the search space belongs to the user
+ search_space = db.query(SearchSpace).filter(
+ SearchSpace.id == search_space_id,
+ SearchSpace.user_id == user.id
+ ).first()
+ if not search_space:
+ raise HTTPException(status_code=404, detail="Search space not found or does not belong to the user")
+
+ docs = []
+
+ for file in files:
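+ # Image files go through the default (higher-accuracy) partition
+ # strategy; all other file types use the faster "fast" strategy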
+ if file.content_type.startswith('image'):
+ loader = UnstructuredLoader(
+ file=file.file,
+ api_key=UNSTRUCTURED_API_KEY,
+ partition_via_api=True,
+ chunking_strategy="basic",
+ max_characters=90000,
+ include_orig_elements=False,
+ )
+ else:
+ loader = UnstructuredLoader(
+ file=file.file,
+ api_key=UNSTRUCTURED_API_KEY,
+ partition_via_api=True,
+ chunking_strategy="basic",
+ max_characters=90000,
+ include_orig_elements=False,
+ strategy="fast"
+ )
+
+ filedocs = loader.load()
+
+
+ fileswithfilename = []
+ for f in filedocs:
+ temp = f
+ temp.metadata['filename'] = file.filename
+ fileswithfilename.append(temp)
+
+ docs.extend(fileswithfilename)
+
+ raw_documents = []
+
+ # Process each document in the retrieved document list
+ for doc in docs:
+ raw_documents.append(Document(page_content=doc.page_content, metadata=doc.metadata))
+
+ # Create hierarchical indices
+ if IS_LOCAL_SETUP:
+ index = HIndices(username=username)
+ else:
+ index = HIndices(username=username, api_key=OPENAI_API_KEY)
+
+ # Save indices in vector stores
+ index.encode_docs_hierarchical(documents=raw_documents, search_space_instance=search_space, files_type='OTHER', db=db)
+
+ print("FINISHED")
+
+ return {
+ "message": "Files Uploaded Successfully"
+ }
+
+ except JWTError:
+ raise HTTPException(status_code=403, detail="Token is invalid or expired")
+
+@app.websocket("/beta/chat/{search_space_id}/{token}")
+async def searchspace_chat_websocket_endpoint(websocket: WebSocket, search_space_id: int, token: str, db: Session = Depends(get_db)):
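+ """
+ Chat websocket for a search space. Expects JSON messages with a "type"
+ of either "search_space_chat" (a query against the search space) or
+ "multiple_documents_chat" (chat over previously retrieved documents),
+ and streams back {"type": "stream", ...} events followed by {"type": "end"}.
+ """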
+ try:
+ payload = jwt.decode(token, SECRET_KEY, algorithms=[ALGORITHM])
+ username: str = payload.get("sub")
+ if username is None:
+ raise HTTPException(status_code=403, detail="Token is invalid or expired")
+
+ # Get the user by username and ensure they exist
+ user = db.query(User).filter(User.username == username).first()
+ if not user:
+ raise HTTPException(status_code=404, detail="User not found")
+
+ # Verify the search space belongs to the user
+ search_space = db.query(SearchSpace).filter(
+ SearchSpace.id == search_space_id,
+ SearchSpace.user_id == user.id
+ ).first()
+ if not search_space:
+ raise HTTPException(status_code=404, detail="Search space not found or does not belong to the user")
+
+ await manager.connect(websocket)
+ try:
+ while True:
+ data = await websocket.receive_text()
+ message = json.loads(data)
+ # print(message)
+ if message["type"] == "search_space_chat":
+ query = message["content"]
+
+ if message["searchtype"] == "local" :
+ report_source = "langchain_documents"
+ else:
+ report_source = message["searchtype"]
+
+ if message["answertype"] == "general_answer" :
+ report_type = "custom_report"
+ else:
+ report_type = message["answertype"]
+
+
+ # Create hierarchical indices
+ if IS_LOCAL_SETUP:
+ index = HIndices(username=username)
+ else:
+ index = HIndices(username=username, api_key=OPENAI_API_KEY)
+
+
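+ # Run the hierarchical search; partial results stream back over the websocket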
+ await index.ws_experimental_search(websocket=websocket, manager=manager, query=query, search_space=search_space.name, report_type=report_type, report_source=report_source)
+
+ await manager.send_personal_message(
+ json.dumps({"type": "end"}),
+ websocket
+ )
+
+ if message["type"] == "multiple_documents_chat":
+ query = message["content"]
+ received_chat_history = message["chat_history"]
+
+ chatHistory = [
+ SystemMessage(
+ content=DATE_TODAY + """You are a helpful assistant for question-answering tasks.
+ Use the following pieces of retrieved context to answer the question.
+ If you don't know the answer, just say that you don't know.
+ Context:""" + str(received_chat_history[0]['relateddocs']))
+ ]
+
+ for data in received_chat_history[1:]:
+ if data["role"] == "user":
+ chatHistory.append(HumanMessage(content=data["content"]))
+
+ if data["role"] == "assistant":
+ chatHistory.append(AIMessage(content=data["content"]))
+
+
+ chatHistory.append(("human", "{input}"))
+
+ qa_prompt = ChatPromptTemplate.from_messages(chatHistory)
+
+ if IS_LOCAL_SETUP:
+ llm = OllamaLLM(model=MODEL_NAME, temperature=0)
+ else:
+ llm = ChatOpenAI(temperature=0, model_name=MODEL_NAME, api_key=OPENAI_API_KEY)
+
+ descriptionchain = qa_prompt | llm
+
+ streamingResponse = ""
+ counter = 0
+ for res in descriptionchain.stream({"input": query}):
+ # OllamaLLM streams plain strings, ChatOpenAI streams message chunks
+ streamingResponse += res if isinstance(res, str) else res.content
+
+ # Flush a partial update to the client roughly every 20 chunks
+ if counter < 20:
+ counter += 1
+ else:
+ await manager.send_personal_message(
+ json.dumps({"type": "stream", "content": streamingResponse}),
+ websocket
+ )
+
+ counter = 0
+
+ await manager.send_personal_message(
+ json.dumps({"type": "stream", "content": streamingResponse}),
+ websocket
+ )
+
+ await manager.send_personal_message(
+ json.dumps({"type": "end"}),
+ websocket
+ )
+ except Exception as e:
+ print(f"Error: {e}")
+ finally:
+ manager.disconnect(websocket)
+ except JWTError:
+ await websocket.close(code=4003, reason="Invalid token")
+
+@app.post("/user/searchspace/create-podcast")
+async def create_podcast(
+ data: CreatePodcast,
+ background_tasks: BackgroundTasks,
+ db: Session = Depends(get_db)
+):
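+ """
+ Create a Podcast row in the given search space and queue audio
+ generation as a background task; returns the new podcast id immediately.
+ """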
+ try:
+ # Verify token and get username
+ payload = jwt.decode(data.token, SECRET_KEY, algorithms=[ALGORITHM])
+ username: str = payload.get("sub")
+ if username is None:
+ raise HTTPException(status_code=403, detail="Token is invalid or expired")
+
+ # Get user
+ user = db.query(User).filter(User.username == username).first()
+ if not user:
+ raise HTTPException(status_code=404, detail="User not found")
+
+ # Verify search space belongs to user
+ search_space = db.query(SearchSpace).filter(
+ SearchSpace.id == data.search_space_id,
+ SearchSpace.user_id == user.id
+ ).first()
+ if not search_space:
+ raise HTTPException(status_code=404, detail="Search space not found or does not belong to the user")
+
+ # Create new podcast entry
+ new_podcast = Podcast(
+ title=data.title,
+ podcast_content=data.podcast_content,
+ search_space_id=search_space.id
+ )
+
+ db.add(new_podcast)
+ db.commit()
+ db.refresh(new_podcast)
+
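+ # Conversation settings handed through to podcastfy's generate_podcast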
+ podcast_config = {
+ 'word_count': data.wordcount,
+ 'podcast_name': 'SurfSense Podcast',
+ 'podcast_tagline': 'Your Own Personal Podcast.',
+ 'output_language': 'English',
+ 'user_instructions': 'Make it fun and engaging',
+ 'engagement_techniques': ['Rhetorical Questions', 'Personal Testimonials', 'Quotes', 'Anecdotes', 'Analogies', 'Humor'],
+ }
+
+ # TODO: Check MODEL_NAME behavior on local setups
+ background_tasks.add_task(
+ generate_podcast_background,
+ new_podcast.id,
+ data.podcast_content,
+ MODEL_NAME,
+ "OPENAI_API_KEY",
+ podcast_config
+ )
+
+ return {"message": "Podcast created successfully", "podcast_id": new_podcast.id}
+
+ except JWTError:
+ raise HTTPException(status_code=403, detail="Token is invalid or expired")
+ except Exception as e:
+ raise HTTPException(status_code=500, detail=str(e))
+
+
+def generate_podcast_background(
+ podcast_id: int,
+ podcast_content: str,
+ model_name: str,
+ api_key_label: str,
+ conversation_config: dict
+):
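+ """Generate podcast audio via podcastfy and mark the Podcast row as generated.
+
+ Runs as a FastAPI background task (a plain def, so it executes in the
+ threadpool), and opens its own database session rather than reusing the
+ request-scoped one, which is closed once the response has been returned.
+ """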
+ try:
+ saved_file_location = generate_podcast(
+ text=podcast_content,
+ llm_model_name=model_name,
+ api_key_label=api_key_label,
+ conversation_config=conversation_config,
+ )
+
+ # Update the podcast row using a fresh session
+ db = SessionLocal()
+ try:
+ podcast = db.query(Podcast).filter(Podcast.id == podcast_id).first()
+ if podcast:
+ podcast.file_location = saved_file_location
+ podcast.is_generated = True
+ db.commit()
+ finally:
+ db.close()
+ except Exception as e:
+ # Log the error so failed generations are visible in server output
+ print(f"Error generating podcast: {str(e)}")
+
+
+@app.get("/user/{token}/searchspace/{search_space_id}/download-podcast/{podcast_id}")
+async def download_podcast(search_space_id: int, podcast_id: int, token: str, db: Session = Depends(get_db)):
+ try:
+ # Verify the token and get the username
+ payload = jwt.decode(token, SECRET_KEY, algorithms=[ALGORITHM])
+ username: str = payload.get("sub")
+ if username is None:
+ raise HTTPException(status_code=403, detail="Token is invalid or expired")
+
+ # Get the user by username
+ user = db.query(User).filter(User.username == username).first()
+ if not user:
+ raise HTTPException(status_code=404, detail="User not found")
+
+ # Verify the search space belongs to the user
+ search_space = db.query(SearchSpace).filter(
+ SearchSpace.id == search_space_id,
+ SearchSpace.user_id == user.id
+ ).first()
+ if not search_space:
+ raise HTTPException(status_code=404, detail="Search space not found or does not belong to the user")
+
+ # Retrieve the podcast file from the database
+ podcast = db.query(Podcast).filter(
+ Podcast.id == podcast_id,
+ Podcast.search_space_id == search_space_id
+ ).first()
+ if not podcast:
+ raise HTTPException(status_code=404, detail="Podcast not found in the specified search space")
+
+ # Read the file content
+ with open(podcast.file_location, "rb") as file:
+ file_content = file.read()
+
+ # Create a response with the file content
+ response = Response(content=file_content)
+ response.headers["Content-Disposition"] = f"attachment; filename={podcast.title}.mp3"
+ response.headers["Content-Type"] = "audio/mpeg"
+
+ return response
+ except JWTError:
+ raise HTTPException(status_code=403, detail="Token is invalid or expired")
+ except Exception as e:
+ raise HTTPException(status_code=500, detail=str(e))
+
+
+@app.get("/user/{token}/searchspace/{search_space_id}/podcasts")
+async def get_user_podcasts(token: str, search_space_id: int, db: Session = Depends(get_db)):
+ try:
+ payload = jwt.decode(token, SECRET_KEY, algorithms=[ALGORITHM])
+ username: str = payload.get("sub")
+ if username is None:
+ raise HTTPException(status_code=403, detail="Token is invalid or expired")
+
+ user = db.query(User).filter(User.username == username).first()
+ if not user:
+ raise HTTPException(status_code=404, detail="User not found")
+
+ search_space = db.query(SearchSpace).filter(
+ SearchSpace.id == search_space_id,
+ SearchSpace.user_id == user.id
+ ).first()
+ if not search_space:
+ raise HTTPException(status_code=404, detail="Search space not found or does not belong to the user")
+
+ podcasts = db.query(Podcast).filter(Podcast.search_space_id == search_space_id).all()
+ return podcasts
+ except JWTError:
+ raise HTTPException(status_code=403, detail="Token is invalid or expired")
+
+# Incomplete function, needs to be implemented based on the actual requirements and database structure
+@app.post("/searchspace/{search_space_id}/delete/docs")
+def delete_all_related_data(search_space_id: int, data: DocumentsToDelete, db: Session = Depends(get_db)):
+ try:
+ payload = jwt.decode(data.token, SECRET_KEY, algorithms=[ALGORITHM])
+ username: str = payload.get("sub")
+ if username is None:
+ raise HTTPException(status_code=403, detail="Token is invalid or expired")
+
+ # Get the user by username and ensure they exist
+ user = db.query(User).filter(User.username == username).first()
+ if not user:
+ raise HTTPException(status_code=404, detail="User not found")
+
+ # Verify the search space belongs to the user
+ search_space = db.query(SearchSpace).filter(
+ SearchSpace.id == search_space_id,
+ SearchSpace.user_id == user.id
+ ).first()
+ if not search_space:
+ raise HTTPException(status_code=404, detail="Search space not found or does not belong to the user")
+
+ if IS_LOCAL_SETUP:
+ index = HIndices(username=username)
+ else:
+ index = HIndices(username=username, api_key=OPENAI_API_KEY)
+
+ message = index.delete_vector_stores(summary_ids_to_delete=data.ids_to_delete, db=db, search_space=search_space.name)
+
+ return {
+ "message": message
+ }
+
+ except JWTError:
+ raise HTTPException(status_code=403, detail="Token is invalid or expired")
+
+
if __name__ == "__main__":
import uvicorn
- uvicorn.run(app, host="127.0.0.1", port=8000)
-
-
-
+ uvicorn.run(app, ws="wsproto", host="127.0.0.1", port=8000)
\ No newline at end of file