add relevant CodeRabbit suggestions

CREDO23 2025-07-27 13:31:41 +02:00
parent dee54bf5e1
commit 8e52a0b201
7 changed files with 534 additions and 296 deletions


@@ -920,29 +920,32 @@ async def fetch_relevant_documents(
}
)
elif connector == "CONFLUENCE_CONNECTOR":
source_object, confluence_chunks = await connector_service.search_confluence(
user_query=reformulated_query,
user_id=user_id,
search_space_id=search_space_id,
top_k=top_k,
search_mode=search_mode,
(
source_object,
confluence_chunks,
) = await connector_service.search_confluence(
user_query=reformulated_query,
user_id=user_id,
search_space_id=search_space_id,
top_k=top_k,
search_mode=search_mode,
)
# Add to sources and raw documents
if source_object:
all_sources.append(source_object)
all_raw_documents.extend(confluence_chunks)
# Stream found document count
if streaming_service and writer:
writer(
{
"yield_value": streaming_service.format_terminal_info_delta(
f"📚 Found {len(confluence_chunks)} Confluence pages related to your query"
)
}
)
# Add to sources and raw documents
if source_object:
all_sources.append(source_object)
all_raw_documents.extend(confluence_chunks)
# Stream found document count
if streaming_service and writer:
writer(
{
"yield_value": streaming_service.format_terminal_info_delta(
f"📚 Found {len(confluence_chunks)} Confluence pages related to your query"
)
}
)
except Exception as e:
error_message = f"Error searching connector {connector}: {e!s}"
print(error_message)
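Both versions of this hunk unpack the same two-element result from connector_service.search_confluence; the reflow is purely cosmetic. A minimal, self-contained sketch of the equivalence (the stub body below is hypothetical; only the tuple shape is taken from the hunk):

import asyncio

async def search_confluence(**kwargs):
    # Hypothetical stand-in: the real ConnectorService method returns
    # a (source_object, chunks) pair.
    return {"id": 30, "name": "Confluence"}, []

async def main():
    # Old style: single-line tuple unpacking.
    source_object, confluence_chunks = await search_confluence(user_query="q")

    # New style from the hunk: parenthesized targets, identical semantics.
    (
        source_object,
        confluence_chunks,
    ) = await search_confluence(user_query="q")
    print(source_object["name"], len(confluence_chunks))

asyncio.run(main())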


@@ -6,7 +6,6 @@ Allows fetching pages and their comments from specified spaces.
"""
import base64
from datetime import datetime, timezone
from typing import Any
import requests
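For context, base64 and requests are the usual ingredients for the Confluence Cloud REST API's Basic authentication (email:api_token); a minimal sketch of the header construction (helper name and endpoint are illustrative, not taken from this diff):

import base64

import requests  # the connector uses requests for the actual HTTP calls

def confluence_auth_headers(email: str, api_token: str) -> dict:
    # Confluence Cloud expects HTTP Basic auth: "email:api_token",
    # base64-encoded into the Authorization header.
    credentials = base64.b64encode(f"{email}:{api_token}".encode()).decode()
    return {"Authorization": f"Basic {credentials}", "Accept": "application/json"}

# Illustrative request; the real connector wraps this in its own helpers.
# response = requests.get(
#     "https://example.atlassian.net/wiki/api/v2/pages",
#     headers=confluence_auth_headers("user@example.com", "api-token"),
#     timeout=30,
# )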


@@ -918,7 +918,9 @@ async def run_confluence_indexing_with_new_session(
await run_confluence_indexing(
session, connector_id, search_space_id, user_id, start_date, end_date
)
logger.info(f"Background task finished: Indexing Confluence connector {connector_id}")
logger.info(
f"Background task finished: Indexing Confluence connector {connector_id}"
)
async def run_confluence_indexing(


@@ -145,7 +145,11 @@ class SearchSourceConnectorBase(BaseModel):
elif connector_type == SearchSourceConnectorType.CONFLUENCE_CONNECTOR:
# For CONFLUENCE_CONNECTOR, only allow specific keys
allowed_keys = ["CONFLUENCE_BASE_URL", "CONFLUENCE_EMAIL", "CONFLUENCE_API_TOKEN"]
allowed_keys = [
"CONFLUENCE_BASE_URL",
"CONFLUENCE_EMAIL",
"CONFLUENCE_API_TOKEN",
]
if set(config.keys()) != set(allowed_keys):
raise ValueError(
f"For CONFLUENCE_CONNECTOR connector type, config must only contain these keys: {allowed_keys}"


@@ -1072,6 +1072,7 @@ class ConnectorService:
}
return result_object, jira_chunks
async def search_confluence(
self,
user_query: str,
@@ -1145,7 +1146,7 @@ class ConnectorService:
description += "..."
# For URL, we can use a placeholder or construct a URL to the Confluence page if available
url = ""
url = "" # TODO: Add base_url to metadata
if page_id:
url = f"{metadata.get('base_url')}/pages/{page_id}"


@@ -2413,7 +2413,9 @@ async def index_confluence_pages(
)
confluence_client = ConfluenceConnector(
base_url=confluence_base_url, email=confluence_email, api_token=confluence_api_token
base_url=confluence_base_url,
email=confluence_email,
api_token=confluence_api_token,
)
# Calculate date range
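The client above is constructed from the three config keys validated earlier; a minimal sketch of that setup together with one common way to derive the date range when explicit dates are not supplied (the ConfluenceConnector stub and the 365-day default are assumptions, not taken from this diff):

from datetime import datetime, timedelta, timezone

class ConfluenceConnector:
    # Hypothetical stub; the real class lives in the connectors package.
    def __init__(self, base_url: str, email: str, api_token: str):
        self.base_url, self.email, self.api_token = base_url, email, api_token

config = {
    "CONFLUENCE_BASE_URL": "https://example.atlassian.net/wiki",
    "CONFLUENCE_EMAIL": "user@example.com",
    "CONFLUENCE_API_TOKEN": "api-token",
}
confluence_client = ConfluenceConnector(
    base_url=config["CONFLUENCE_BASE_URL"],
    email=config["CONFLUENCE_EMAIL"],
    api_token=config["CONFLUENCE_API_TOKEN"],
)

# Assumed default: fall back to the last 365 days when start/end dates are missing.
end_date = datetime.now(timezone.utc)
start_date = end_date - timedelta(days=365)
print(start_date.date(), "->", end_date.date())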
@@ -2528,9 +2530,7 @@ async def index_confluence_pages(
logger.warning(
f"Skipping page with missing ID or title: {page_id or 'Unknown'}"
)
skipped_pages.append(
f"{page_title or 'Unknown'} (missing data)"
)
skipped_pages.append(f"{page_title or 'Unknown'} (missing data)")
documents_skipped += 1
continue
@@ -2549,7 +2549,9 @@
if comment.get("body") and comment["body"].get("storage"):
comment_body = comment["body"]["storage"].get("value", "")
comment_author = comment.get("version", {}).get("authorId", "Unknown")
comment_author = comment.get("version", {}).get(
"authorId", "Unknown"
)
comment_date = comment.get("version", {}).get("createdAt", "")
comments_content += f"**Comment by {comment_author}** ({comment_date}):\n{comment_body}\n\n"
@@ -2558,15 +2560,15 @@
full_content = f"# {page_title}\n\n{page_content}{comments_content}"
if not full_content.strip():
logger.warning(
f"Skipping page with no content: {page_title}"
)
logger.warning(f"Skipping page with no content: {page_title}")
skipped_pages.append(f"{page_title} (no content)")
documents_skipped += 1
continue
# Create a simple summary
summary_content = f"Confluence Page: {page_title}\n\nSpace ID: {space_id}\n\n"
summary_content = (
f"Confluence Page: {page_title}\n\nSpace ID: {space_id}\n\n"
)
if page_content:
# Take first 500 characters of content for summary
content_preview = page_content[:500]
@@ -2611,9 +2613,7 @@
]
# Create and store new document
logger.info(
f"Creating new document for page {page_title}"
)
logger.info(f"Creating new document for page {page_title}")
document = Document(
search_space_id=search_space_id,
title=f"Confluence - {page_title}",
@@ -2633,9 +2633,7 @@
session.add(document)
documents_indexed += 1
logger.info(
f"Successfully indexed new page {page_title}"
)
logger.info(f"Successfully indexed new page {page_title}")
except Exception as e:
logger.error(
@@ -2656,7 +2654,9 @@
# Commit all changes
await session.commit()
logger.info("Successfully committed all Confluence document changes to database")
logger.info(
"Successfully committed all Confluence document changes to database"
)
# Log success
await task_logger.log_task_success(
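Several hunks above reflow how a page body and its comments are merged into one markdown document before indexing; a self-contained sketch of that assembly step (the dict shapes are assumed from the field names in the diff; the helper itself is not the indexer's actual code path):

def build_page_document(page: dict, comments: list[dict]) -> str:
    # Field names ("body" -> "storage" -> "value", "version.authorId",
    # "version.createdAt") are taken from the hunks above.
    page_title = page.get("title", "Untitled")
    page_content = page.get("body", {}).get("storage", {}).get("value", "")

    comments_content = ""
    for comment in comments:
        if comment.get("body") and comment["body"].get("storage"):
            comment_body = comment["body"]["storage"].get("value", "")
            comment_author = comment.get("version", {}).get("authorId", "Unknown")
            comment_date = comment.get("version", {}).get("createdAt", "")
            comments_content += (
                f"**Comment by {comment_author}** ({comment_date}):\n{comment_body}\n\n"
            )

    return f"# {page_title}\n\n{page_content}{comments_content}"

page = {"title": "Runbook", "body": {"storage": {"value": "Restart the service.\n\n"}}}
comments = [
    {
        "body": {"storage": {"value": "Also check the logs."}},
        "version": {"authorId": "abc123", "createdAt": "2025-07-27T10:00:00Z"},
    }
]
print(build_page_document(page, comments))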


@@ -1,265 +1,494 @@
import { useState, useEffect, useCallback } from 'react';
import { useRouter } from 'next/navigation';
import { useForm } from 'react-hook-form';
import { zodResolver } from '@hookform/resolvers/zod';
import { toast } from 'sonner';
import { useSearchSourceConnectors, SearchSourceConnector } from '@/hooks/useSearchSourceConnectors';
import {
GithubRepo,
EditMode,
githubPatSchema,
editConnectorSchema,
GithubPatFormValues,
EditConnectorFormValues
} from '@/components/editConnector/types';
import { useState, useEffect, useCallback } from "react";
import { useRouter } from "next/navigation";
import { useForm } from "react-hook-form";
import { zodResolver } from "@hookform/resolvers/zod";
import { toast } from "sonner";
import {
useSearchSourceConnectors,
SearchSourceConnector,
} from "@/hooks/useSearchSourceConnectors";
import {
GithubRepo,
EditMode,
githubPatSchema,
editConnectorSchema,
GithubPatFormValues,
EditConnectorFormValues,
} from "@/components/editConnector/types";
export function useConnectorEditPage(connectorId: number, searchSpaceId: string) {
const router = useRouter();
const { connectors, updateConnector, isLoading: connectorsLoading } = useSearchSourceConnectors();
export function useConnectorEditPage(
connectorId: number,
searchSpaceId: string,
) {
const router = useRouter();
const {
connectors,
updateConnector,
isLoading: connectorsLoading,
} = useSearchSourceConnectors();
// State managed by the hook
const [connector, setConnector] = useState<SearchSourceConnector | null>(null);
const [originalConfig, setOriginalConfig] = useState<Record<string, any> | null>(null);
const [isSaving, setIsSaving] = useState(false);
const [currentSelectedRepos, setCurrentSelectedRepos] = useState<string[]>([]);
const [originalPat, setOriginalPat] = useState<string>("");
const [editMode, setEditMode] = useState<EditMode>('viewing');
const [fetchedRepos, setFetchedRepos] = useState<GithubRepo[] | null>(null);
const [newSelectedRepos, setNewSelectedRepos] = useState<string[]>([]);
const [isFetchingRepos, setIsFetchingRepos] = useState(false);
// State managed by the hook
const [connector, setConnector] = useState<SearchSourceConnector | null>(
null,
);
const [originalConfig, setOriginalConfig] = useState<Record<
string,
any
> | null>(null);
const [isSaving, setIsSaving] = useState(false);
const [currentSelectedRepos, setCurrentSelectedRepos] = useState<string[]>(
[],
);
const [originalPat, setOriginalPat] = useState<string>("");
const [editMode, setEditMode] = useState<EditMode>("viewing");
const [fetchedRepos, setFetchedRepos] = useState<GithubRepo[] | null>(null);
const [newSelectedRepos, setNewSelectedRepos] = useState<string[]>([]);
const [isFetchingRepos, setIsFetchingRepos] = useState(false);
// Forms managed by the hook
const patForm = useForm<GithubPatFormValues>({
resolver: zodResolver(githubPatSchema),
defaultValues: { github_pat: "" },
});
const editForm = useForm<EditConnectorFormValues>({
resolver: zodResolver(editConnectorSchema),
defaultValues: {
name: "",
SLACK_BOT_TOKEN: "",
NOTION_INTEGRATION_TOKEN: "",
SERPER_API_KEY: "",
TAVILY_API_KEY: "",
LINEAR_API_KEY: "",
DISCORD_BOT_TOKEN: "",
CONFLUENCE_BASE_URL: "",
CONFLUENCE_EMAIL: "",
CONFLUENCE_API_TOKEN: "",
JIRA_BASE_URL: "",
JIRA_EMAIL: "",
JIRA_API_TOKEN: "",
},
});
// Forms managed by the hook
const patForm = useForm<GithubPatFormValues>({
resolver: zodResolver(githubPatSchema),
defaultValues: { github_pat: "" },
});
const editForm = useForm<EditConnectorFormValues>({
resolver: zodResolver(editConnectorSchema),
defaultValues: {
name: "",
SLACK_BOT_TOKEN: "",
NOTION_INTEGRATION_TOKEN: "",
SERPER_API_KEY: "",
TAVILY_API_KEY: "",
LINEAR_API_KEY: "",
DISCORD_BOT_TOKEN: "",
CONFLUENCE_BASE_URL: "",
CONFLUENCE_EMAIL: "",
CONFLUENCE_API_TOKEN: "",
JIRA_BASE_URL: "",
JIRA_EMAIL: "",
JIRA_API_TOKEN: "",
},
});
// Effect to load initial data
useEffect(() => {
if (!connectorsLoading && connectors.length > 0 && !connector) {
const currentConnector = connectors.find(c => c.id === connectorId);
if (currentConnector) {
setConnector(currentConnector);
const config = currentConnector.config || {};
setOriginalConfig(config);
editForm.reset({
name: currentConnector.name,
SLACK_BOT_TOKEN: config.SLACK_BOT_TOKEN || "",
NOTION_INTEGRATION_TOKEN: config.NOTION_INTEGRATION_TOKEN || "",
SERPER_API_KEY: config.SERPER_API_KEY || "",
TAVILY_API_KEY: config.TAVILY_API_KEY || "",
LINEAR_API_KEY: config.LINEAR_API_KEY || "",
LINKUP_API_KEY: config.LINKUP_API_KEY || "",
DISCORD_BOT_TOKEN: config.DISCORD_BOT_TOKEN || "",
});
if (currentConnector.connector_type === 'GITHUB_CONNECTOR') {
const savedRepos = config.repo_full_names || [];
const savedPat = config.GITHUB_PAT || "";
setCurrentSelectedRepos(savedRepos);
setNewSelectedRepos(savedRepos);
setOriginalPat(savedPat);
patForm.reset({ github_pat: savedPat });
setEditMode('viewing');
}
} else {
toast.error("Connector not found.");
router.push(`/dashboard/${searchSpaceId}/connectors`);
}
// Effect to load initial data
useEffect(() => {
if (!connectorsLoading && connectors.length > 0 && !connector) {
const currentConnector = connectors.find((c) => c.id === connectorId);
if (currentConnector) {
setConnector(currentConnector);
const config = currentConnector.config || {};
setOriginalConfig(config);
editForm.reset({
name: currentConnector.name,
SLACK_BOT_TOKEN: config.SLACK_BOT_TOKEN || "",
NOTION_INTEGRATION_TOKEN: config.NOTION_INTEGRATION_TOKEN || "",
SERPER_API_KEY: config.SERPER_API_KEY || "",
TAVILY_API_KEY: config.TAVILY_API_KEY || "",
LINEAR_API_KEY: config.LINEAR_API_KEY || "",
LINKUP_API_KEY: config.LINKUP_API_KEY || "",
DISCORD_BOT_TOKEN: config.DISCORD_BOT_TOKEN || "",
CONFLUENCE_BASE_URL: config.CONFLUENCE_BASE_URL || "",
CONFLUENCE_EMAIL: config.CONFLUENCE_EMAIL || "",
CONFLUENCE_API_TOKEN: config.CONFLUENCE_API_TOKEN || "",
JIRA_BASE_URL: config.JIRA_BASE_URL || "",
JIRA_EMAIL: config.JIRA_EMAIL || "",
JIRA_API_TOKEN: config.JIRA_API_TOKEN || "",
});
if (currentConnector.connector_type === "GITHUB_CONNECTOR") {
const savedRepos = config.repo_full_names || [];
const savedPat = config.GITHUB_PAT || "";
setCurrentSelectedRepos(savedRepos);
setNewSelectedRepos(savedRepos);
setOriginalPat(savedPat);
patForm.reset({ github_pat: savedPat });
setEditMode("viewing");
}
}, [connectorId, connectors, connectorsLoading, router, searchSpaceId, connector, editForm, patForm]);
} else {
toast.error("Connector not found.");
router.push(`/dashboard/${searchSpaceId}/connectors`);
}
}
}, [
connectorId,
connectors,
connectorsLoading,
router,
searchSpaceId,
connector,
editForm,
patForm,
]);
// Handlers managed by the hook
const handleFetchRepositories = useCallback(async (values: GithubPatFormValues) => {
setIsFetchingRepos(true);
setFetchedRepos(null);
try {
const token = localStorage.getItem('surfsense_bearer_token');
if (!token) throw new Error('No auth token');
const response = await fetch(
`${process.env.NEXT_PUBLIC_FASTAPI_BACKEND_URL}/api/v1/github/repositories/`,
{ method: 'POST', headers: { 'Content-Type': 'application/json', 'Authorization': `Bearer ${token}` }, body: JSON.stringify({ github_pat: values.github_pat }) }
// Handlers managed by the hook
const handleFetchRepositories = useCallback(
async (values: GithubPatFormValues) => {
setIsFetchingRepos(true);
setFetchedRepos(null);
try {
const token = localStorage.getItem("surfsense_bearer_token");
if (!token) throw new Error("No auth token");
const response = await fetch(
`${process.env.NEXT_PUBLIC_FASTAPI_BACKEND_URL}/api/v1/github/repositories/`,
{
method: "POST",
headers: {
"Content-Type": "application/json",
Authorization: `Bearer ${token}`,
},
body: JSON.stringify({ github_pat: values.github_pat }),
},
);
if (!response.ok) {
const err = await response.json();
throw new Error(err.detail || "Fetch failed");
}
const data: GithubRepo[] = await response.json();
setFetchedRepos(data);
setNewSelectedRepos(currentSelectedRepos);
toast.success(`Found ${data.length} repos.`);
} catch (error) {
console.error("Error fetching GitHub repositories:", error);
toast.error(
error instanceof Error
? error.message
: "Failed to fetch repositories.",
);
} finally {
setIsFetchingRepos(false);
}
},
[currentSelectedRepos],
); // Added dependency
const handleRepoSelectionChange = useCallback(
(repoFullName: string, checked: boolean) => {
setNewSelectedRepos((prev) =>
checked
? [...prev, repoFullName]
: prev.filter((name) => name !== repoFullName),
);
},
[],
);
const handleSaveChanges = useCallback(
async (formData: EditConnectorFormValues) => {
if (!connector || !originalConfig) return;
setIsSaving(true);
const updatePayload: Partial<SearchSourceConnector> = {};
let configChanged = false;
let newConfig: Record<string, any> | null = null;
if (formData.name !== connector.name) {
updatePayload.name = formData.name;
}
switch (connector.connector_type) {
case "GITHUB_CONNECTOR": {
const currentPatInForm = patForm.getValues("github_pat");
const patChanged = currentPatInForm !== originalPat;
const initialRepoSet = new Set(currentSelectedRepos);
const newRepoSet = new Set(newSelectedRepos);
const reposChanged =
initialRepoSet.size !== newRepoSet.size ||
![...initialRepoSet].every((repo) => newRepoSet.has(repo));
if (
patChanged ||
(editMode === "editing_repos" &&
reposChanged &&
fetchedRepos !== null)
) {
if (
!currentPatInForm ||
!(
currentPatInForm.startsWith("ghp_") ||
currentPatInForm.startsWith("github_pat_")
)
) {
toast.error("Invalid GitHub PAT format. Cannot save.");
setIsSaving(false);
return;
}
newConfig = {
GITHUB_PAT: currentPatInForm,
repo_full_names: newSelectedRepos,
};
if (reposChanged && newSelectedRepos.length === 0) {
toast.warning("Warning: No repositories selected.");
}
}
break;
}
case "SLACK_CONNECTOR":
if (formData.SLACK_BOT_TOKEN !== originalConfig.SLACK_BOT_TOKEN) {
if (!formData.SLACK_BOT_TOKEN) {
toast.error("Slack Token empty.");
setIsSaving(false);
return;
}
newConfig = { SLACK_BOT_TOKEN: formData.SLACK_BOT_TOKEN };
}
break;
case "NOTION_CONNECTOR":
if (
formData.NOTION_INTEGRATION_TOKEN !==
originalConfig.NOTION_INTEGRATION_TOKEN
) {
if (!formData.NOTION_INTEGRATION_TOKEN) {
toast.error("Notion Token empty.");
setIsSaving(false);
return;
}
newConfig = {
NOTION_INTEGRATION_TOKEN: formData.NOTION_INTEGRATION_TOKEN,
};
}
break;
case "SERPER_API":
if (formData.SERPER_API_KEY !== originalConfig.SERPER_API_KEY) {
if (!formData.SERPER_API_KEY) {
toast.error("Serper Key empty.");
setIsSaving(false);
return;
}
newConfig = { SERPER_API_KEY: formData.SERPER_API_KEY };
}
break;
case "TAVILY_API":
if (formData.TAVILY_API_KEY !== originalConfig.TAVILY_API_KEY) {
if (!formData.TAVILY_API_KEY) {
toast.error("Tavily Key empty.");
setIsSaving(false);
return;
}
newConfig = { TAVILY_API_KEY: formData.TAVILY_API_KEY };
}
break;
case "LINEAR_CONNECTOR":
if (formData.LINEAR_API_KEY !== originalConfig.LINEAR_API_KEY) {
if (!formData.LINEAR_API_KEY) {
toast.error("Linear API Key cannot be empty.");
setIsSaving(false);
return;
}
newConfig = { LINEAR_API_KEY: formData.LINEAR_API_KEY };
}
break;
case "LINKUP_API":
if (formData.LINKUP_API_KEY !== originalConfig.LINKUP_API_KEY) {
if (!formData.LINKUP_API_KEY) {
toast.error("Linkup API Key cannot be empty.");
setIsSaving(false);
return;
}
newConfig = { LINKUP_API_KEY: formData.LINKUP_API_KEY };
}
break;
case "DISCORD_CONNECTOR":
if (formData.DISCORD_BOT_TOKEN !== originalConfig.DISCORD_BOT_TOKEN) {
if (!formData.DISCORD_BOT_TOKEN) {
toast.error("Discord Bot Token cannot be empty.");
setIsSaving(false);
return;
}
newConfig = { DISCORD_BOT_TOKEN: formData.DISCORD_BOT_TOKEN };
}
break;
case "CONFLUENCE_CONNECTOR":
if (
formData.CONFLUENCE_BASE_URL !==
originalConfig.CONFLUENCE_BASE_URL ||
formData.CONFLUENCE_EMAIL !== originalConfig.CONFLUENCE_EMAIL ||
formData.CONFLUENCE_API_TOKEN !==
originalConfig.CONFLUENCE_API_TOKEN
) {
if (
!formData.CONFLUENCE_BASE_URL ||
!formData.CONFLUENCE_EMAIL ||
!formData.CONFLUENCE_API_TOKEN
) {
toast.error("All Confluence fields are required.");
setIsSaving(false);
return;
}
newConfig = {
CONFLUENCE_BASE_URL: formData.CONFLUENCE_BASE_URL,
CONFLUENCE_EMAIL: formData.CONFLUENCE_EMAIL,
CONFLUENCE_API_TOKEN: formData.CONFLUENCE_API_TOKEN,
};
}
break;
case "JIRA_CONNECTOR":
if (
formData.JIRA_BASE_URL !== originalConfig.JIRA_BASE_URL ||
formData.JIRA_EMAIL !== originalConfig.JIRA_EMAIL ||
formData.JIRA_API_TOKEN !== originalConfig.JIRA_API_TOKEN
) {
if (
!formData.JIRA_BASE_URL ||
!formData.JIRA_EMAIL ||
!formData.JIRA_API_TOKEN
) {
toast.error("All Jira fields are required.");
setIsSaving(false);
return;
}
newConfig = {
JIRA_BASE_URL: formData.JIRA_BASE_URL,
JIRA_EMAIL: formData.JIRA_EMAIL,
JIRA_API_TOKEN: formData.JIRA_API_TOKEN,
};
}
break;
}
if (newConfig !== null) {
updatePayload.config = newConfig;
configChanged = true;
}
if (Object.keys(updatePayload).length === 0) {
toast.info("No changes detected.");
setIsSaving(false);
if (connector.connector_type === "GITHUB_CONNECTOR") {
setEditMode("viewing");
patForm.reset({ github_pat: originalPat });
}
return;
}
try {
await updateConnector(connectorId, updatePayload);
toast.success("Connector updated!");
const newlySavedConfig = updatePayload.config || originalConfig;
setOriginalConfig(newlySavedConfig);
if (updatePayload.name) {
setConnector((prev) =>
prev
? { ...prev, name: updatePayload.name!, config: newlySavedConfig }
: null,
);
}
if (configChanged) {
if (connector.connector_type === "GITHUB_CONNECTOR") {
const savedGitHubConfig = newlySavedConfig as {
GITHUB_PAT?: string;
repo_full_names?: string[];
};
setCurrentSelectedRepos(savedGitHubConfig.repo_full_names || []);
setOriginalPat(savedGitHubConfig.GITHUB_PAT || "");
setNewSelectedRepos(savedGitHubConfig.repo_full_names || []);
patForm.reset({ github_pat: savedGitHubConfig.GITHUB_PAT || "" });
} else if (connector.connector_type === "SLACK_CONNECTOR") {
editForm.setValue(
"SLACK_BOT_TOKEN",
newlySavedConfig.SLACK_BOT_TOKEN || "",
);
if (!response.ok) { const err = await response.json(); throw new Error(err.detail || 'Fetch failed'); }
const data: GithubRepo[] = await response.json();
setFetchedRepos(data);
setNewSelectedRepos(currentSelectedRepos);
toast.success(`Found ${data.length} repos.`);
} catch (error) {
console.error("Error fetching GitHub repositories:", error);
toast.error(error instanceof Error ? error.message : "Failed to fetch repositories.");
} finally { setIsFetchingRepos(false); }
}, [currentSelectedRepos]); // Added dependency
const handleRepoSelectionChange = useCallback((repoFullName: string, checked: boolean) => {
setNewSelectedRepos(prev => checked ? [...prev, repoFullName] : prev.filter(name => name !== repoFullName));
}, []);
const handleSaveChanges = useCallback(async (formData: EditConnectorFormValues) => {
if (!connector || !originalConfig) return;
setIsSaving(true);
const updatePayload: Partial<SearchSourceConnector> = {};
let configChanged = false;
let newConfig: Record<string, any> | null = null;
if (formData.name !== connector.name) {
updatePayload.name = formData.name;
} else if (connector.connector_type === "NOTION_CONNECTOR") {
editForm.setValue(
"NOTION_INTEGRATION_TOKEN",
newlySavedConfig.NOTION_INTEGRATION_TOKEN || "",
);
} else if (connector.connector_type === "SERPER_API") {
editForm.setValue(
"SERPER_API_KEY",
newlySavedConfig.SERPER_API_KEY || "",
);
} else if (connector.connector_type === "TAVILY_API") {
editForm.setValue(
"TAVILY_API_KEY",
newlySavedConfig.TAVILY_API_KEY || "",
);
} else if (connector.connector_type === "LINEAR_CONNECTOR") {
editForm.setValue(
"LINEAR_API_KEY",
newlySavedConfig.LINEAR_API_KEY || "",
);
} else if (connector.connector_type === "LINKUP_API") {
editForm.setValue(
"LINKUP_API_KEY",
newlySavedConfig.LINKUP_API_KEY || "",
);
} else if (connector.connector_type === "DISCORD_CONNECTOR") {
editForm.setValue(
"DISCORD_BOT_TOKEN",
newlySavedConfig.DISCORD_BOT_TOKEN || "",
);
} else if (connector.connector_type === "CONFLUENCE_CONNECTOR") {
editForm.setValue(
"CONFLUENCE_BASE_URL",
newlySavedConfig.CONFLUENCE_BASE_URL || "",
);
editForm.setValue(
"CONFLUENCE_EMAIL",
newlySavedConfig.CONFLUENCE_EMAIL || "",
);
editForm.setValue(
"CONFLUENCE_API_TOKEN",
newlySavedConfig.CONFLUENCE_API_TOKEN || "",
);
} else if (connector.connector_type === "JIRA_CONNECTOR") {
editForm.setValue(
"JIRA_BASE_URL",
newlySavedConfig.JIRA_BASE_URL || "",
);
editForm.setValue("JIRA_EMAIL", newlySavedConfig.JIRA_EMAIL || "");
editForm.setValue(
"JIRA_API_TOKEN",
newlySavedConfig.JIRA_API_TOKEN || "",
);
}
}
switch (connector.connector_type) {
case 'GITHUB_CONNECTOR':
const currentPatInForm = patForm.getValues('github_pat');
const patChanged = currentPatInForm !== originalPat;
const initialRepoSet = new Set(currentSelectedRepos);
const newRepoSet = new Set(newSelectedRepos);
const reposChanged = initialRepoSet.size !== newRepoSet.size || ![...initialRepoSet].every(repo => newRepoSet.has(repo));
if (patChanged || (editMode === 'editing_repos' && reposChanged && fetchedRepos !== null)) {
if (!currentPatInForm || !(currentPatInForm.startsWith('ghp_') || currentPatInForm.startsWith('github_pat_'))) {
toast.error("Invalid GitHub PAT format. Cannot save."); setIsSaving(false); return;
}
newConfig = { GITHUB_PAT: currentPatInForm, repo_full_names: newSelectedRepos };
if (reposChanged && newSelectedRepos.length === 0) { toast.warning("Warning: No repositories selected."); }
}
break;
case 'SLACK_CONNECTOR':
if (formData.SLACK_BOT_TOKEN !== originalConfig.SLACK_BOT_TOKEN) {
if (!formData.SLACK_BOT_TOKEN) { toast.error("Slack Token empty."); setIsSaving(false); return; }
newConfig = { SLACK_BOT_TOKEN: formData.SLACK_BOT_TOKEN };
}
break;
case 'NOTION_CONNECTOR':
if (formData.NOTION_INTEGRATION_TOKEN !== originalConfig.NOTION_INTEGRATION_TOKEN) {
if (!formData.NOTION_INTEGRATION_TOKEN) { toast.error("Notion Token empty."); setIsSaving(false); return; }
newConfig = { NOTION_INTEGRATION_TOKEN: formData.NOTION_INTEGRATION_TOKEN };
}
break;
case 'SERPER_API':
if (formData.SERPER_API_KEY !== originalConfig.SERPER_API_KEY) {
if (!formData.SERPER_API_KEY) { toast.error("Serper Key empty."); setIsSaving(false); return; }
newConfig = { SERPER_API_KEY: formData.SERPER_API_KEY };
}
break;
case 'TAVILY_API':
if (formData.TAVILY_API_KEY !== originalConfig.TAVILY_API_KEY) {
if (!formData.TAVILY_API_KEY) { toast.error("Tavily Key empty."); setIsSaving(false); return; }
newConfig = { TAVILY_API_KEY: formData.TAVILY_API_KEY };
}
break;
case 'LINEAR_CONNECTOR':
if (formData.LINEAR_API_KEY !== originalConfig.LINEAR_API_KEY) {
if (!formData.LINEAR_API_KEY) {
toast.error("Linear API Key cannot be empty.");
setIsSaving(false);
return;
}
newConfig = { LINEAR_API_KEY: formData.LINEAR_API_KEY };
}
break;
case 'LINKUP_API':
if (formData.LINKUP_API_KEY !== originalConfig.LINKUP_API_KEY) {
if (!formData.LINKUP_API_KEY) { toast.error("Linkup API Key cannot be empty."); setIsSaving(false); return; }
newConfig = { LINKUP_API_KEY: formData.LINKUP_API_KEY };
}
break;
case 'DISCORD_CONNECTOR':
if (formData.DISCORD_BOT_TOKEN !== originalConfig.DISCORD_BOT_TOKEN) {
if (!formData.DISCORD_BOT_TOKEN) { toast.error("Discord Bot Token cannot be empty."); setIsSaving(false); return; }
newConfig = { DISCORD_BOT_TOKEN: formData.DISCORD_BOT_TOKEN };
}
break;
if (connector.connector_type === "GITHUB_CONNECTOR") {
setEditMode("viewing");
setFetchedRepos(null);
}
// Resetting simple form values is handled by useEffect if connector state updates
} catch (error) {
console.error("Error updating connector:", error);
toast.error(
error instanceof Error
? error.message
: "Failed to update connector.",
);
} finally {
setIsSaving(false);
}
},
[
connector,
originalConfig,
updateConnector,
connectorId,
patForm,
originalPat,
currentSelectedRepos,
newSelectedRepos,
editMode,
fetchedRepos,
editForm,
],
); // Added editForm to dependencies
if (newConfig !== null) {
updatePayload.config = newConfig;
configChanged = true;
}
if (Object.keys(updatePayload).length === 0) {
toast.info("No changes detected.");
setIsSaving(false);
if (connector.connector_type === 'GITHUB_CONNECTOR') { setEditMode('viewing'); patForm.reset({ github_pat: originalPat }); }
return;
}
try {
await updateConnector(connectorId, updatePayload);
toast.success("Connector updated!");
const newlySavedConfig = updatePayload.config || originalConfig;
setOriginalConfig(newlySavedConfig);
if (updatePayload.name) {
setConnector(prev => prev ? { ...prev, name: updatePayload.name!, config: newlySavedConfig } : null);
}
if (configChanged) {
if (connector.connector_type === 'GITHUB_CONNECTOR') {
const savedGitHubConfig = newlySavedConfig as { GITHUB_PAT?: string; repo_full_names?: string[] };
setCurrentSelectedRepos(savedGitHubConfig.repo_full_names || []);
setOriginalPat(savedGitHubConfig.GITHUB_PAT || "");
setNewSelectedRepos(savedGitHubConfig.repo_full_names || []);
patForm.reset({ github_pat: savedGitHubConfig.GITHUB_PAT || "" });
} else if(connector.connector_type === 'SLACK_CONNECTOR') {
editForm.setValue('SLACK_BOT_TOKEN', newlySavedConfig.SLACK_BOT_TOKEN || "");
} else if(connector.connector_type === 'NOTION_CONNECTOR') {
editForm.setValue('NOTION_INTEGRATION_TOKEN', newlySavedConfig.NOTION_INTEGRATION_TOKEN || "");
} else if(connector.connector_type === 'SERPER_API') {
editForm.setValue('SERPER_API_KEY', newlySavedConfig.SERPER_API_KEY || "");
} else if(connector.connector_type === 'TAVILY_API') {
editForm.setValue('TAVILY_API_KEY', newlySavedConfig.TAVILY_API_KEY || "");
} else if(connector.connector_type === 'LINEAR_CONNECTOR') {
editForm.setValue('LINEAR_API_KEY', newlySavedConfig.LINEAR_API_KEY || "");
} else if(connector.connector_type === 'LINKUP_API') {
editForm.setValue('LINKUP_API_KEY', newlySavedConfig.LINKUP_API_KEY || "");
} else if(connector.connector_type === 'DISCORD_CONNECTOR') {
editForm.setValue('DISCORD_BOT_TOKEN', newlySavedConfig.DISCORD_BOT_TOKEN || "");
}
}
if (connector.connector_type === 'GITHUB_CONNECTOR') {
setEditMode('viewing');
setFetchedRepos(null);
}
// Resetting simple form values is handled by useEffect if connector state updates
} catch (error) {
console.error("Error updating connector:", error);
toast.error(error instanceof Error ? error.message : "Failed to update connector.");
} finally { setIsSaving(false); }
}, [connector, originalConfig, updateConnector, connectorId, patForm, originalPat, currentSelectedRepos, newSelectedRepos, editMode, fetchedRepos, editForm]); // Added editForm to dependencies
// Return values needed by the component
return {
connectorsLoading,
connector,
isSaving,
editForm,
patForm,
handleSaveChanges,
// GitHub specific props
editMode,
setEditMode,
originalPat,
currentSelectedRepos,
fetchedRepos,
setFetchedRepos,
newSelectedRepos,
setNewSelectedRepos,
isFetchingRepos,
handleFetchRepositories,
handleRepoSelectionChange,
};
}
// Return values needed by the component
return {
connectorsLoading,
connector,
isSaving,
editForm,
patForm,
handleSaveChanges,
// GitHub specific props
editMode,
setEditMode,
originalPat,
currentSelectedRepos,
fetchedRepos,
setFetchedRepos,
newSelectedRepos,
setNewSelectedRepos,
isFetchingRepos,
handleFetchRepositories,
handleRepoSelectionChange,
};
}