mirror of
https://github.com/rcourtman/Pulse.git
synced 2026-05-07 08:57:12 +00:00
1366 lines
39 KiB
Go
1366 lines
39 KiB
Go
package api
|
|
|
|
import (
|
|
"context"
|
|
"encoding/json"
|
|
"net/http"
|
|
"strconv"
|
|
"strings"
|
|
"sync"
|
|
"sync/atomic"
|
|
"time"
|
|
|
|
"fmt"
|
|
|
|
"github.com/rcourtman/pulse-go-rewrite/internal/agentexec"
|
|
"github.com/rcourtman/pulse-go-rewrite/internal/ai/approval"
|
|
"github.com/rcourtman/pulse-go-rewrite/internal/ai/chat"
|
|
"github.com/rcourtman/pulse-go-rewrite/internal/ai/tools"
|
|
"github.com/rcourtman/pulse-go-rewrite/internal/ai/unified"
|
|
"github.com/rcourtman/pulse-go-rewrite/internal/config"
|
|
"github.com/rcourtman/pulse-go-rewrite/internal/monitoring"
|
|
recoverymanager "github.com/rcourtman/pulse-go-rewrite/internal/recovery/manager"
|
|
"github.com/rcourtman/pulse-go-rewrite/internal/unifiedresources"
|
|
"github.com/rs/zerolog/log"
|
|
)
|
|
|
|
// AIPersistence interface for loading/saving AI config.
// Satisfied by *config.ConfigPersistence (see loadAIConfig, which type-asserts to it).
type AIPersistence interface {
	// LoadAIConfig loads the persisted AI configuration.
	LoadAIConfig() (*config.AIConfig, error)
	// DataDir returns the base directory used for persisted AI data.
	DataDir() string
}
|
|
|
|
// AIService interface for the AI chat service - enables mocking in tests.
// The production implementation is chat.Service (see newChatService).
type AIService interface {
	// Lifecycle management.
	Start(ctx context.Context) error
	Stop(ctx context.Context) error
	Restart(ctx context.Context, newCfg *config.AIConfig) error
	IsRunning() bool

	// Prompt execution: one-shot and streaming variants.
	Execute(ctx context.Context, req chat.ExecuteRequest) (map[string]interface{}, error)
	ExecuteStream(ctx context.Context, req chat.ExecuteRequest, callback chat.StreamCallback) error

	// Session management and history.
	ListSessions(ctx context.Context) ([]chat.Session, error)
	CreateSession(ctx context.Context) (*chat.Session, error)
	DeleteSession(ctx context.Context, sessionID string) error
	GetMessages(ctx context.Context, sessionID string) ([]chat.Message, error)
	AbortSession(ctx context.Context, sessionID string) error
	SummarizeSession(ctx context.Context, sessionID string) (map[string]interface{}, error)
	GetSessionDiff(ctx context.Context, sessionID string) (map[string]interface{}, error)
	ForkSession(ctx context.Context, sessionID string) (*chat.Session, error)
	RevertSession(ctx context.Context, sessionID string) (map[string]interface{}, error)
	UnrevertSession(ctx context.Context, sessionID string) (map[string]interface{}, error)
	AnswerQuestion(ctx context.Context, questionID string, answers []chat.QuestionAnswer) error

	// Provider wiring: injects the data providers the chat tools read from.
	SetAlertProvider(provider chat.MCPAlertProvider)
	SetFindingsProvider(provider chat.MCPFindingsProvider)
	SetBaselineProvider(provider chat.MCPBaselineProvider)
	SetPatternProvider(provider chat.MCPPatternProvider)
	SetMetricsHistory(provider chat.MCPMetricsHistoryProvider)
	SetAgentProfileManager(manager chat.AgentProfileManager)
	SetGuestConfigProvider(provider chat.MCPGuestConfigProvider)
	SetAppContainerConfigProvider(provider chat.MCPAppContainerConfigProvider)
	SetBackupProvider(provider chat.MCPBackupProvider)
	SetDiskHealthProvider(provider chat.MCPDiskHealthProvider)
	SetUpdatesProvider(provider chat.MCPUpdatesProvider)
	SetFindingsManager(manager chat.FindingsManager)
	SetMetadataUpdater(updater chat.MetadataUpdater)
	SetKnowledgeStoreProvider(provider chat.KnowledgeStoreProvider)
	SetIncidentRecorderProvider(provider chat.IncidentRecorderProvider)
	SetEventCorrelatorProvider(provider chat.EventCorrelatorProvider)
	SetDiscoveryProvider(provider chat.MCPDiscoveryProvider)
	SetUnifiedResourceProvider(provider chat.MCPUnifiedResourceProvider)
	SetAppContainerActionProvider(provider chat.MCPAppContainerActionProvider)
	SetAppContainerReadProvider(provider chat.MCPAppContainerReadProvider)

	// Runtime settings and addressing.
	UpdateControlSettings(cfg *config.AIConfig)
	GetBaseURL() string
}
|
|
|
|
// AIHandler handles all AI endpoints using direct AI integration.
// It manages one default-org chat service plus lazily-created per-tenant
// services, and the approval store used for command approval workflows.
type AIHandler struct {
	// stateMu guards the multi-tenant references and default config/persistence
	// fields below (mtPersistence, mtMonitor, defaultConfig, defaultPersistence,
	// defaultMonitor, readState, recoveryManager) — read via stateRefs().
	stateMu sync.RWMutex
	// approvalStoreMu guards approvalStore, approvalStoreDir, approvalStoreStop.
	approvalStoreMu sync.Mutex

	mtPersistence      *config.MultiTenantPersistence
	mtMonitor          *monitoring.MultiTenantMonitor
	defaultConfig      *config.Config
	defaultPersistence AIPersistence
	// hostedMode is set once in NewAIHandler from the environment and not
	// mutated afterwards.
	hostedMode bool

	// defaultService is the default-org chat service; guarded by servicesMu.
	defaultService AIService
	agentServer    *agentexec.Server
	// services maps orgID -> tenant chat service; guarded by servicesMu.
	services   map[string]AIService
	servicesMu sync.RWMutex

	// serviceInit is an optional initializer run whenever a service is
	// returned or created; guarded by serviceInitMu.
	serviceInitMu sync.RWMutex
	serviceInit   func(ctx context.Context, svc AIService)

	// defaultMonitor is cached by Start() so Restart() can re-bootstrap.
	defaultMonitor *monitoring.Monitor

	// unifiedStoreMu guards unifiedStore and unifiedStores (orgID -> store).
	unifiedStoreMu sync.RWMutex
	unifiedStore   *unified.UnifiedStore
	unifiedStores  map[string]*unified.UnifiedStore

	readState       unifiedresources.ReadState
	recoveryManager *recoverymanager.Manager

	// Approval store state for the command approval workflow.
	approvalStore     *approval.Store
	approvalStoreDir  string
	approvalStoreStop context.CancelFunc
}
|
|
|
|
// newChatService is the factory function for creating the AI service.
// Can be swapped in tests for mocking (it is a package-level var, not a func).
var newChatService = func(cfg chat.Config) AIService {
	return chat.NewService(cfg)
}
|
|
|
|
// NewAIHandler creates a new AI handler
|
|
func NewAIHandler(mtp *config.MultiTenantPersistence, mtm *monitoring.MultiTenantMonitor, agentServer *agentexec.Server) *AIHandler {
|
|
var defaultConfig *config.Config
|
|
var defaultPersistence AIPersistence
|
|
|
|
if mtm != nil {
|
|
if m, err := mtm.GetMonitor("default"); err == nil && m != nil {
|
|
defaultConfig = m.GetConfig()
|
|
}
|
|
}
|
|
if mtp != nil {
|
|
if p, err := mtp.GetPersistence("default"); err == nil {
|
|
defaultPersistence = p
|
|
}
|
|
}
|
|
|
|
return &AIHandler{
|
|
mtPersistence: mtp,
|
|
mtMonitor: mtm,
|
|
defaultConfig: defaultConfig,
|
|
defaultPersistence: defaultPersistence,
|
|
hostedMode: hostedModeEnabledFromEnv(),
|
|
agentServer: agentServer,
|
|
services: make(map[string]AIService),
|
|
unifiedStores: make(map[string]*unified.UnifiedStore),
|
|
}
|
|
}
|
|
|
|
// stateRefs returns a consistent snapshot of the handler's mutable state
// references under a single read lock, so callers avoid piecemeal locked reads.
func (h *AIHandler) stateRefs() (
	*config.MultiTenantPersistence,
	*monitoring.MultiTenantMonitor,
	*config.Config,
	AIPersistence,
	unifiedresources.ReadState,
	*recoverymanager.Manager,
) {
	h.stateMu.RLock()
	defer h.stateMu.RUnlock()
	return h.mtPersistence, h.mtMonitor, h.defaultConfig, h.defaultPersistence, h.readState, h.recoveryManager
}
|
|
|
|
func (h *AIHandler) getDefaultService() AIService {
|
|
if h == nil {
|
|
return nil
|
|
}
|
|
h.servicesMu.RLock()
|
|
defer h.servicesMu.RUnlock()
|
|
return h.defaultService
|
|
}
|
|
|
|
// normalizeAIChatOrgID trims whitespace from an org ID and maps an empty
// result to the "default" org.
func normalizeAIChatOrgID(orgID string) string {
	if trimmed := strings.TrimSpace(orgID); trimmed != "" {
		return trimmed
	}
	return "default"
}
|
|
|
|
// SetUnifiedStore sets the unified store for finding context lookup in the default org "Discuss" flow.
|
|
func (h *AIHandler) SetUnifiedStore(store *unified.UnifiedStore) {
|
|
h.SetUnifiedStoreForOrg("default", store)
|
|
}
|
|
|
|
// SetUnifiedStoreForOrg sets the unified store for finding context lookup in an org-specific "Discuss" flow.
|
|
func (h *AIHandler) SetUnifiedStoreForOrg(orgID string, store *unified.UnifiedStore) {
|
|
orgID = normalizeAIChatOrgID(orgID)
|
|
h.unifiedStoreMu.Lock()
|
|
if h.unifiedStores == nil {
|
|
h.unifiedStores = make(map[string]*unified.UnifiedStore)
|
|
}
|
|
if store == nil {
|
|
delete(h.unifiedStores, orgID)
|
|
} else {
|
|
h.unifiedStores[orgID] = store
|
|
}
|
|
if orgID == "default" {
|
|
h.unifiedStore = store
|
|
}
|
|
h.unifiedStoreMu.Unlock()
|
|
}
|
|
|
|
// GetUnifiedStoreForOrg returns the unified store for finding context lookup for a specific org.
|
|
func (h *AIHandler) GetUnifiedStoreForOrg(orgID string) *unified.UnifiedStore {
|
|
if h == nil {
|
|
return nil
|
|
}
|
|
orgID = normalizeAIChatOrgID(orgID)
|
|
h.unifiedStoreMu.RLock()
|
|
if h.unifiedStores != nil {
|
|
if store := h.unifiedStores[orgID]; store != nil {
|
|
h.unifiedStoreMu.RUnlock()
|
|
return store
|
|
}
|
|
}
|
|
store := h.unifiedStore
|
|
h.unifiedStoreMu.RUnlock()
|
|
if orgID == "default" {
|
|
return store
|
|
}
|
|
return nil
|
|
}
|
|
|
|
// SetReadState stores a unified read-state provider for injection into newly created chat services.
|
|
func (h *AIHandler) SetReadState(rs unifiedresources.ReadState) {
|
|
if h == nil {
|
|
return
|
|
}
|
|
h.stateMu.Lock()
|
|
defer h.stateMu.Unlock()
|
|
h.readState = rs
|
|
}
|
|
|
|
// SetRecoveryManager stores a recovery manager for injection into newly created chat services.
|
|
func (h *AIHandler) SetRecoveryManager(manager *recoverymanager.Manager) {
|
|
if h == nil {
|
|
return
|
|
}
|
|
h.stateMu.Lock()
|
|
defer h.stateMu.Unlock()
|
|
h.recoveryManager = manager
|
|
}
|
|
|
|
func (h *AIHandler) applyServiceInitializer(ctx context.Context, svc AIService) {
|
|
if h == nil || svc == nil {
|
|
return
|
|
}
|
|
|
|
h.serviceInitMu.RLock()
|
|
initializer := h.serviceInit
|
|
h.serviceInitMu.RUnlock()
|
|
if initializer == nil {
|
|
return
|
|
}
|
|
if ctx == nil {
|
|
ctx = context.Background()
|
|
}
|
|
|
|
initializer(ctx, svc)
|
|
}
|
|
|
|
// SetServiceInitializer configures an initializer that runs whenever a chat
// service is returned or created, allowing router-level org-specific wiring.
// When a non-nil initializer is installed, it is also applied immediately to
// the default service and every already-created tenant service.
func (h *AIHandler) SetServiceInitializer(initializer func(ctx context.Context, svc AIService)) {
	if h == nil {
		return
	}

	h.serviceInitMu.Lock()
	h.serviceInit = initializer
	h.serviceInitMu.Unlock()

	if initializer == nil {
		return
	}

	// Snapshot existing services under the read lock, then release it BEFORE
	// invoking the initializer so the callback cannot deadlock by re-entering
	// handler methods that take servicesMu.
	orgServices := make(map[string]AIService)
	h.servicesMu.RLock()
	defaultSvc := h.defaultService
	for orgID, svc := range h.services {
		if svc != nil {
			orgServices[orgID] = svc
		}
	}
	h.servicesMu.RUnlock()

	// Each invocation gets a context carrying the org ID so the initializer
	// can do org-scoped wiring.
	if defaultSvc != nil {
		defaultCtx := context.WithValue(context.Background(), OrgIDContextKey, "default")
		initializer(defaultCtx, defaultSvc)
	}
	for orgID, svc := range orgServices {
		ctx := context.WithValue(context.Background(), OrgIDContextKey, orgID)
		initializer(ctx, svc)
	}
}
|
|
|
|
// GetService returns the AI service for the current context
|
|
func (h *AIHandler) GetService(ctx context.Context) AIService {
|
|
orgID := GetOrgID(ctx)
|
|
if orgID == "default" || orgID == "" {
|
|
svc := h.getDefaultService()
|
|
if svc != nil {
|
|
defaultCtx := ctx
|
|
if strings.TrimSpace(GetOrgID(defaultCtx)) == "" {
|
|
defaultCtx = context.WithValue(context.Background(), OrgIDContextKey, "default")
|
|
}
|
|
h.applyServiceInitializer(defaultCtx, svc)
|
|
}
|
|
return svc
|
|
}
|
|
|
|
h.servicesMu.RLock()
|
|
svc, exists := h.services[orgID]
|
|
h.servicesMu.RUnlock()
|
|
|
|
if exists {
|
|
h.applyServiceInitializer(ctx, svc)
|
|
return svc
|
|
}
|
|
|
|
h.servicesMu.Lock()
|
|
defer h.servicesMu.Unlock()
|
|
|
|
// Double check
|
|
if svc, exists = h.services[orgID]; exists {
|
|
return svc
|
|
}
|
|
|
|
// Create and start service for this tenant
|
|
svc = h.initTenantService(ctx, orgID)
|
|
if svc != nil {
|
|
h.applyServiceInitializer(ctx, svc)
|
|
h.services[orgID] = svc
|
|
}
|
|
return svc
|
|
}
|
|
|
|
// RemoveTenantService stops and removes the AI service for a specific tenant.
|
|
// This should be called when a tenant is offboarded to free resources.
|
|
func (h *AIHandler) RemoveTenantService(ctx context.Context, orgID string) error {
|
|
orgID = normalizeAIChatOrgID(orgID)
|
|
if orgID == "default" {
|
|
return nil // Don't remove the default-org service.
|
|
}
|
|
|
|
// Clear org-scoped finding context store even if the chat service was never created.
|
|
h.SetUnifiedStoreForOrg(orgID, nil)
|
|
|
|
h.servicesMu.Lock()
|
|
defer h.servicesMu.Unlock()
|
|
|
|
svc, exists := h.services[orgID]
|
|
if !exists {
|
|
return nil // Nothing to remove
|
|
}
|
|
|
|
if svc != nil {
|
|
if err := svc.Stop(ctx); err != nil {
|
|
log.Warn().Str("orgID", orgID).Err(err).Msg("Error stopping AI service for removed tenant")
|
|
}
|
|
}
|
|
|
|
delete(h.services, orgID)
|
|
log.Info().Str("orgID", orgID).Msg("Removed AI service for tenant")
|
|
return nil
|
|
}
|
|
|
|
// initTenantService builds and starts a chat service for a non-default org.
// Returns nil when multi-tenant persistence is unavailable, the org's AI
// config cannot be loaded, AI is disabled for the org, or startup fails.
// Callers (GetService) hold servicesMu while invoking this.
func (h *AIHandler) initTenantService(ctx context.Context, orgID string) AIService {
	mtPersistence, mtMonitor, _, _, _, recoveryManager := h.stateRefs()

	if mtPersistence == nil {
		return nil
	}

	// Build a long-lived, org-tagged context for config loading so the
	// service setup is not tied to the triggering request's lifetime.
	tenantCtx := context.WithValue(backgroundContext(ctx), OrgIDContextKey, orgID)
	persistence, err := mtPersistence.GetPersistence(orgID)
	if err != nil {
		log.Warn().Str("orgID", orgID).Err(err).Msg("Failed to get persistence for AI service")
		return nil
	}

	// Tenant chat startup must use the same hosted-aware config path as
	// /api/settings/ai so hosted orgs do not race into a synthetic disabled/default config.
	aiCfg := h.loadAIConfig(tenantCtx)
	if aiCfg == nil {
		log.Info().Str("orgID", orgID).Msg("AI config is nil for tenant service initialization")
		return nil
	}
	if !aiCfg.Enabled {
		log.Info().Str("orgID", orgID).Bool("enabled", aiCfg.Enabled).Msg("AI is disabled in tenant config")
		return nil
	}

	dataDir := h.getDataDir(aiCfg, persistence.DataDir())

	// Create chat config
	chatCfg := chat.Config{
		AIConfig:    aiCfg,
		DataDir:     dataDir,
		AgentServer: h.agentServer,
		ReadState:   h.readStateForOrg(orgID),
		OrgID:       orgID,
	}
	if recoveryManager != nil {
		chatCfg.RecoveryPointsProvider = tools.NewRecoveryPointsMCPAdapter(recoveryManager, orgID)
	}

	// Get monitor for state provider (optional; service runs without one).
	if mtMonitor != nil {
		if m, err := mtMonitor.GetMonitor(orgID); err == nil && m != nil {
			chatCfg.StateProvider = m
		}
	}

	svc := newChatService(chatCfg)
	if err := svc.Start(ctx); err != nil {
		log.Error().Str("orgID", orgID).Err(err).Msg("Failed to start AI service for tenant")
		return nil
	}

	return svc
}
|
|
|
|
func (h *AIHandler) getDataDir(aiCfg *config.AIConfig, baseDir string) string {
|
|
dataDir := baseDir
|
|
if dataDir == "" {
|
|
dataDir = "data"
|
|
}
|
|
return dataDir
|
|
}
|
|
|
|
// readStateForOrg returns the unified read state for an org. With a
// multi-tenant monitor present, the org's own monitor is preferred; only the
// default org may fall through to the handler-level readState fallback.
func (h *AIHandler) readStateForOrg(orgID string) unifiedresources.ReadState {
	if h == nil {
		return nil
	}
	_, mtMonitor, _, _, fallbackReadState, _ := h.stateRefs()
	orgID = strings.TrimSpace(orgID)
	if orgID == "" {
		orgID = "default"
	}

	if mtMonitor != nil {
		if monitor, err := mtMonitor.GetMonitor(orgID); err == nil && monitor != nil {
			if readState := monitor.GetUnifiedReadState(); readState != nil {
				return readState
			}
		}
		if orgID != "default" {
			// Security: never fall back to default-org read state for non-default orgs.
			return nil
		}
	}

	// Reached only for the default org, or when no multi-tenant monitor exists.
	return fallbackReadState
}
|
|
|
|
func (h *AIHandler) getConfig(ctx context.Context) *config.Config {
|
|
_, mtMonitor, defaultConfig, _, _, _ := h.stateRefs()
|
|
orgID := strings.TrimSpace(GetOrgID(ctx))
|
|
if orgID == "" || orgID == "default" {
|
|
return defaultConfig
|
|
}
|
|
if mtMonitor != nil {
|
|
if m, err := mtMonitor.GetMonitor(orgID); err == nil && m != nil {
|
|
return m.GetConfig()
|
|
}
|
|
// Security: never fall back to default config for non-default orgs.
|
|
return nil
|
|
}
|
|
return defaultConfig
|
|
}
|
|
|
|
func (h *AIHandler) getPersistence(ctx context.Context) AIPersistence {
|
|
mtPersistence, _, _, defaultPersistence, _, _ := h.stateRefs()
|
|
orgID := strings.TrimSpace(GetOrgID(ctx))
|
|
if orgID == "" || orgID == "default" {
|
|
return defaultPersistence
|
|
}
|
|
if mtPersistence != nil {
|
|
if p, err := mtPersistence.GetPersistence(orgID); err == nil && p != nil {
|
|
return p
|
|
}
|
|
// Security: never fall back to default persistence for non-default orgs.
|
|
return nil
|
|
}
|
|
return defaultPersistence
|
|
}
|
|
|
|
// loadAIConfig loads AI config for the current context
|
|
func (h *AIHandler) loadAIConfig(ctx context.Context) *config.AIConfig {
|
|
p := h.getPersistence(ctx)
|
|
if p == nil {
|
|
return nil
|
|
}
|
|
if persistence, ok := p.(*config.ConfigPersistence); ok {
|
|
billingBaseDir := persistence.DataDir()
|
|
orgID := strings.TrimSpace(GetOrgID(ctx))
|
|
if orgID == "" {
|
|
orgID = "default"
|
|
}
|
|
if h.mtPersistence != nil {
|
|
billingBaseDir = h.mtPersistence.BaseDataDir()
|
|
}
|
|
cfg, err := loadHostedAwareAIConfig(h.hostedMode, billingBaseDir, orgID, persistence)
|
|
if err == nil {
|
|
return cfg
|
|
}
|
|
log.Warn().Err(err).Str("org_id", orgID).Msg("Failed to load hosted-aware Pulse Assistant config")
|
|
}
|
|
cfg, err := p.LoadAIConfig()
|
|
if err != nil {
|
|
return nil
|
|
}
|
|
return cfg
|
|
}
|
|
|
|
// SetMultiTenantPersistence updates the persistence manager
|
|
func (h *AIHandler) SetMultiTenantPersistence(mtp *config.MultiTenantPersistence) {
|
|
h.stateMu.Lock()
|
|
defer h.stateMu.Unlock()
|
|
h.mtPersistence = mtp
|
|
}
|
|
|
|
// SetMultiTenantMonitor updates the monitor manager
|
|
func (h *AIHandler) SetMultiTenantMonitor(mtm *monitoring.MultiTenantMonitor) {
|
|
h.stateMu.Lock()
|
|
defer h.stateMu.Unlock()
|
|
h.mtMonitor = mtm
|
|
}
|
|
|
|
// ensureApprovalStore (re)initializes the approval store rooted at dataDir and
// publishes it via the package-global approval.SetStore. If a store already
// exists for the same directory it is re-published as-is; otherwise the old
// store's cleanup goroutine is cancelled before a replacement is created.
// An empty dataDir is a no-op.
func (h *AIHandler) ensureApprovalStore(dataDir string) {
	if strings.TrimSpace(dataDir) == "" {
		return
	}

	h.approvalStoreMu.Lock()
	defer h.approvalStoreMu.Unlock()

	// Fast path: already initialized for this directory — re-publish the
	// existing store and keep its cleanup goroutine running.
	if h.approvalStore != nil && h.approvalStoreDir == dataDir {
		approval.SetStore(h.approvalStore)
		return
	}

	// Stop the previous store's cleanup goroutine before replacing it.
	if h.approvalStoreStop != nil {
		h.approvalStoreStop()
		h.approvalStoreStop = nil
	}

	approvalStore, err := approval.NewStore(approval.StoreConfig{
		DataDir:        dataDir,
		DefaultTimeout: 5 * time.Minute,
		MaxApprovals:   100,
	})
	if err != nil {
		// Degrade gracefully: clear all store state so approvals simply are
		// not persisted rather than failing the caller.
		h.approvalStore = nil
		h.approvalStoreDir = ""
		approval.SetStore(nil)
		log.Warn().Err(err).Msg("Failed to create approval store, approvals will not be persisted")
		return
	}

	// Cleanup runs until clearApprovalStore (or a later re-init) cancels it.
	cleanupCtx, cleanupCancel := context.WithCancel(context.Background())
	approvalStore.StartCleanup(cleanupCtx)
	approval.SetStore(approvalStore)
	h.approvalStore = approvalStore
	h.approvalStoreDir = dataDir
	h.approvalStoreStop = cleanupCancel
	log.Info().Str("data_dir", dataDir).Msg("Approval store initialized")
}
|
|
|
|
func (h *AIHandler) clearApprovalStore() {
|
|
h.approvalStoreMu.Lock()
|
|
defer h.approvalStoreMu.Unlock()
|
|
|
|
if h.approvalStoreStop != nil {
|
|
h.approvalStoreStop()
|
|
h.approvalStoreStop = nil
|
|
}
|
|
h.approvalStore = nil
|
|
h.approvalStoreDir = ""
|
|
approval.SetStore(nil)
|
|
}
|
|
|
|
// Start initializes and starts the AI chat service.
// The monitor parameter provides state snapshots to the chat service (satisfies chat.StateProvider).
// Returns nil without starting anything when AI config is absent or disabled.
func (h *AIHandler) Start(ctx context.Context, monitor *monitoring.Monitor) error {
	log.Info().Msg("AIHandler.Start called")
	aiCfg := h.loadAIConfig(ctx)
	if aiCfg == nil {
		log.Info().Msg("AI config is nil, AI is disabled")
		return nil
	}
	if !aiCfg.Enabled {
		log.Info().Bool("enabled", aiCfg.Enabled).Msg("AI is disabled in config")
		return nil
	}

	// Determine data directory
	// NOTE(review): persistence is not nil-checked before DataDir() here —
	// presumably Start is only reached when a persistence exists (loadAIConfig
	// returned non-nil); confirm against callers.
	persistence := h.getPersistence(ctx)
	dataDir := h.getDataDir(aiCfg, persistence.DataDir())

	orgID := GetOrgID(ctx)
	if orgID == "" {
		orgID = "default"
	}

	// Cache the monitor for use by Restart().
	h.stateMu.Lock()
	h.defaultMonitor = monitor
	h.stateMu.Unlock()

	// Create chat config
	chatCfg := chat.Config{
		AIConfig:      aiCfg,
		DataDir:       dataDir,
		StateProvider: monitor,
		AgentServer:   h.agentServer,
		ReadState:     h.readStateForOrg(orgID),
		OrgID:         orgID,
	}
	_, _, _, _, _, recoveryManager := h.stateRefs()
	if recoveryManager != nil {
		chatCfg.RecoveryPointsProvider = tools.NewRecoveryPointsMCPAdapter(recoveryManager, orgID)
	}

	svc := newChatService(chatCfg)
	if err := svc.Start(ctx); err != nil {
		return fmt.Errorf("start AI chat service: %w", err)
	}
	// Publish as the default-org service, then run any configured initializer
	// with an org-tagged background context.
	h.servicesMu.Lock()
	h.defaultService = svc
	h.servicesMu.Unlock()
	h.applyServiceInitializer(context.WithValue(context.Background(), OrgIDContextKey, orgID), svc)

	// Initialize approval store for command approval workflow.
	h.ensureApprovalStore(dataDir)

	log.Info().Msg("Pulse AI started (direct integration)")
	return nil
}
|
|
|
|
// Stop stops the AI chat service
|
|
func (h *AIHandler) Stop(ctx context.Context) error {
|
|
defer h.clearApprovalStore()
|
|
if svc := h.getDefaultService(); svc != nil {
|
|
return svc.Stop(ctx)
|
|
}
|
|
return nil
|
|
}
|
|
|
|
// Restart restarts the AI chat service with updated configuration
|
|
// Call this when model or other settings change
|
|
func (h *AIHandler) Restart(ctx context.Context) error {
|
|
// Load fresh config from persistence to get latest settings
|
|
newCfg := h.loadAIConfig(ctx)
|
|
svc := h.getDefaultService()
|
|
|
|
if newCfg == nil || !newCfg.Enabled {
|
|
h.clearApprovalStore()
|
|
if svc == nil {
|
|
return nil
|
|
}
|
|
return svc.Restart(ctx, newCfg)
|
|
}
|
|
|
|
// If enabled but not started yet, recover the monitor and bootstrap now.
|
|
if svc == nil {
|
|
log.Info().Msg("Starting AI service via restart trigger")
|
|
|
|
h.stateMu.RLock()
|
|
m := h.defaultMonitor
|
|
mtm := h.mtMonitor
|
|
h.stateMu.RUnlock()
|
|
|
|
if m == nil && mtm != nil {
|
|
m, _ = mtm.GetMonitor("default")
|
|
}
|
|
|
|
return h.Start(ctx, m)
|
|
}
|
|
|
|
if !svc.IsRunning() {
|
|
log.Info().Msg("Starting AI service via restart trigger")
|
|
|
|
// Recover the monitor: prefer cached default-org monitor, fall back to mtMonitor.
|
|
h.stateMu.RLock()
|
|
m := h.defaultMonitor
|
|
mtm := h.mtMonitor
|
|
h.stateMu.RUnlock()
|
|
|
|
if m == nil && mtm != nil {
|
|
m, _ = mtm.GetMonitor("default")
|
|
}
|
|
|
|
// Reuse start logic
|
|
return h.Start(ctx, m)
|
|
}
|
|
|
|
if err := svc.Restart(ctx, newCfg); err != nil {
|
|
return err
|
|
}
|
|
|
|
persistence := h.getPersistence(ctx)
|
|
if persistence == nil {
|
|
return nil
|
|
}
|
|
dataDir := h.getDataDir(newCfg, persistence.DataDir())
|
|
h.ensureApprovalStore(dataDir)
|
|
|
|
return nil
|
|
}
|
|
|
|
// GetAIConfig returns the current AI configuration for the request context.
// (A stray duplicated "IsRunning" comment that previously sat above this
// function has been removed; IsRunning is documented on its own definition.)
func (h *AIHandler) GetAIConfig(ctx context.Context) *config.AIConfig {
	return h.loadAIConfig(ctx)
}
|
|
|
|
// IsRunning returns true if the AI chat service is running
|
|
func (h *AIHandler) IsRunning(ctx context.Context) bool {
|
|
svc := h.GetService(ctx)
|
|
return svc != nil && svc.IsRunning()
|
|
}
|
|
|
|
// ChatMention represents a resource tagged via @ mention in the chat UI.
type ChatMention struct {
	// ID is the resource identifier supplied by the UI.
	ID string `json:"id"`
	// Name is the display name of the mentioned resource.
	Name string `json:"name"`
	// Type is the raw resource type; canonicalized server-side by
	// canonicalizeChatMentionType before use.
	Type string `json:"type"`
	// Node is the hosting node, when applicable.
	Node string `json:"node,omitempty"`
}
|
|
|
|
func canonicalizeChatMentionType(raw string) string {
|
|
normalized := normalizeAITransportResourceType(raw)
|
|
switch normalized {
|
|
case "vm", "node", "agent", "system-container", "app-container", "docker-host", "k8s-cluster", "k8s-node", "k8s-pod", "k8s-deployment", "storage", "disk", "pbs", "pmg", "proxmox", "ceph", "oci-container":
|
|
return normalized
|
|
default:
|
|
return ""
|
|
}
|
|
}
|
|
|
|
// ChatRequest represents a chat request.
type ChatRequest struct {
	// Prompt is the user's message text.
	Prompt string `json:"prompt"`
	// SessionID continues an existing session when set.
	SessionID string `json:"session_id,omitempty"`
	// Model optionally overrides the configured model.
	Model string `json:"model,omitempty"`
	// Mentions are resources @-tagged in the UI.
	Mentions []ChatMention `json:"mentions,omitempty"`
	// FindingID, when set, causes finding context to be prepended to Prompt.
	FindingID string `json:"finding_id,omitempty"`
}
|
|
|
|
// HandleChat handles POST /api/ai/chat - streaming chat.
// It streams the assistant's response to the client as Server-Sent Events,
// with a heartbeat to keep intermediaries from closing the connection.
// Execution is bound to the client request context so disconnects cancel
// backend work (capped at a 15-minute hard timeout).
func (h *AIHandler) HandleChat(w http.ResponseWriter, r *http.Request) {
	// CORS
	if cfg := h.getConfig(r.Context()); cfg != nil {
		applyConfiguredCORSHeaders(
			w,
			r.Header.Get("Origin"),
			cfg.AllowedOrigins,
			"POST, OPTIONS",
			"Content-Type, Accept, Cookie",
		)
	}

	if r.Method == http.MethodOptions {
		w.WriteHeader(http.StatusOK)
		return
	}

	if r.Method != http.MethodPost {
		http.Error(w, "Method not allowed", http.StatusMethodNotAllowed)
		return
	}

	// Auth already handled by RequireAuth wrapper - no need to check again

	ctx := r.Context()
	if !h.IsRunning(ctx) {
		http.Error(w, "Pulse Assistant is not running", http.StatusServiceUnavailable)
		return
	}
	svc := h.GetService(ctx)
	if svc == nil {
		http.Error(w, "Pulse Assistant service not available", http.StatusServiceUnavailable)
		return
	}

	// Parse request
	var req ChatRequest
	if err := json.NewDecoder(r.Body).Decode(&req); err != nil {
		http.Error(w, "Invalid request", http.StatusBadRequest)
		return
	}

	// Log a short preview of the prompt only (avoid logging full user input).
	// NOTE(review): byte-based truncation can split a multi-byte UTF-8 rune;
	// harmless here since it only feeds a log line.
	preview := req.Prompt
	if len(preview) > 100 {
		preview = preview[:100] + "..."
	}
	log.Info().
		Str("sessionId", req.SessionID).
		Str("prompt_preview", preview).
		Msg("AIHandler: Received chat request")

	// Set up SSE
	w.Header().Set("Content-Type", "text/event-stream")
	w.Header().Set("Cache-Control", "no-cache")
	w.Header().Set("Connection", "keep-alive")
	// Disable buffering in nginx-style proxies so events flush immediately.
	w.Header().Set("X-Accel-Buffering", "no")
	w.Header().Set("Transfer-Encoding", "identity")

	flusher, ok := w.(http.Flusher)
	if !ok {
		http.Error(w, "Streaming not supported", http.StatusInternalServerError)
		return
	}

	// Disable timeouts on the underlying connection for the stream's lifetime;
	// per-write deadlines are re-armed before each write below.
	rc := http.NewResponseController(w)
	_ = rc.SetWriteDeadline(time.Time{})
	_ = rc.SetReadDeadline(time.Time{})

	flusher.Flush()

	// Keep assistant execution bound to the client request so disconnects cancel
	// backend work instead of letting it continue until the hard timeout expires.
	ctx, cancel := context.WithTimeout(r.Context(), 15*time.Minute)
	defer cancel()

	// Heartbeat: emit an SSE comment every 5s; a failed write marks the client
	// disconnected and cancels the execution context.
	heartbeatDone := make(chan struct{})
	var clientDisconnected atomic.Bool

	go func() {
		ticker := time.NewTicker(5 * time.Second)
		defer ticker.Stop()
		for {
			select {
			case <-ctx.Done():
				clientDisconnected.Store(true)
				return
			case <-ticker.C:
				_ = rc.SetWriteDeadline(time.Now().Add(10 * time.Second))
				_, err := w.Write([]byte(": heartbeat\n\n"))
				if err != nil {
					clientDisconnected.Store(true)
					cancel()
					return
				}
				flusher.Flush()
			case <-heartbeatDone:
				return
			}
		}
	}()
	defer close(heartbeatDone)

	// Write helper: serializes one event as an SSE "data:" frame. Write errors
	// flag the disconnect and cancel execution; marshal errors drop the event.
	writeEvent := func(event chat.StreamEvent) {
		if clientDisconnected.Load() {
			return
		}
		data, err := json.Marshal(event)
		if err != nil {
			return
		}
		_ = rc.SetWriteDeadline(time.Now().Add(10 * time.Second))
		_, err = w.Write([]byte("data: " + string(data) + "\n\n"))
		if err != nil {
			clientDisconnected.Store(true)
			cancel()
			return
		}
		flusher.Flush()
	}

	// Convert API mentions to chat mentions; unsupported types are logged and skipped.
	var chatMentions []chat.StructuredMention
	for _, m := range req.Mentions {
		mentionType := canonicalizeChatMentionType(m.Type)
		if mentionType == "" {
			log.Warn().
				Str("mention_type", m.Type).
				Str("mention_name", m.Name).
				Msg("Ignoring unsupported chat mention type")
			continue
		}
		chatMentions = append(chatMentions, chat.StructuredMention{
			ID:   m.ID,
			Name: m.Name,
			Type: mentionType,
			Node: m.Node,
		})
	}

	// Augment prompt with finding context when discussing a specific finding
	prompt := req.Prompt
	if req.FindingID != "" {
		store := h.GetUnifiedStoreForOrg(GetOrgID(ctx))
		if store != nil {
			if f := store.Get(req.FindingID); f != nil {
				findingCtx := fmt.Sprintf("[Finding Context]\nID: %s\nTitle: %s\nSeverity: %s\nCategory: %s\nResource: %s (%s)\nDescription: %s",
					f.ID, f.Title, f.Severity, f.Category, f.ResourceName, f.ResourceType, f.Description)
				if f.Recommendation != "" {
					findingCtx += fmt.Sprintf("\nRecommendation: %s", f.Recommendation)
				}
				if f.Evidence != "" {
					findingCtx += fmt.Sprintf("\nEvidence: %s", f.Evidence)
				}
				if f.InvestigationStatus != "" {
					findingCtx += fmt.Sprintf("\nInvestigation Status: %s", f.InvestigationStatus)
				}
				if f.InvestigationOutcome != "" {
					findingCtx += fmt.Sprintf("\nInvestigation Outcome: %s", f.InvestigationOutcome)
				}
				if f.UserNote != "" {
					findingCtx += fmt.Sprintf("\nUser Note: %s", f.UserNote)
				}
				if f.AcknowledgedAt != nil {
					findingCtx += fmt.Sprintf("\nAcknowledged At: %s", f.AcknowledgedAt.Format(time.RFC3339))
				}
				if f.Node != "" {
					findingCtx += fmt.Sprintf("\nNode: %s", f.Node)
				}
				prompt = findingCtx + "\n\n---\nUser message: " + prompt
			}
		}
	}

	// Stream from AI chat service
	serviceSentDone := false
	err := svc.ExecuteStream(ctx, chat.ExecuteRequest{
		Prompt:    prompt,
		SessionID: req.SessionID,
		Model:     req.Model,
		Mentions:  chatMentions,
		FindingID: req.FindingID,
	}, func(event chat.StreamEvent) {
		if event.Type == "done" {
			serviceSentDone = true
		}
		writeEvent(event)
	})

	if err != nil {
		// Send a generic error event; details stay server-side in the log.
		log.Error().Err(err).Msg("Chat stream error")
		errData, _ := json.Marshal(chat.ErrorData{Message: "An error occurred while processing your request"})
		writeEvent(chat.StreamEvent{Type: "error", Data: errData})
	}

	// Send done (only if the service did not already emit one).
	if !serviceSentDone {
		writeEvent(chat.StreamEvent{Type: "done", Data: nil})
	}
}
|
|
|
|
// HandleSessions handles GET /api/ai/sessions - list sessions
|
|
func (h *AIHandler) HandleSessions(w http.ResponseWriter, r *http.Request) {
|
|
ctx := r.Context()
|
|
if !h.IsRunning(ctx) {
|
|
http.Error(w, "Pulse Assistant is not running", http.StatusServiceUnavailable)
|
|
return
|
|
}
|
|
|
|
svc := h.GetService(ctx)
|
|
if svc == nil {
|
|
http.Error(w, "Pulse Assistant service not available", http.StatusServiceUnavailable)
|
|
return
|
|
}
|
|
|
|
sessions, err := svc.ListSessions(ctx)
|
|
if err != nil {
|
|
http.Error(w, sanitizeErrorForClient(err, "Internal server error"), http.StatusInternalServerError)
|
|
return
|
|
}
|
|
|
|
// Optional limit parameter (for relay proxy clients with body size constraints)
|
|
if limitStr := r.URL.Query().Get("limit"); limitStr != "" {
|
|
if limit, err := strconv.Atoi(limitStr); err == nil && limit > 0 && limit < len(sessions) {
|
|
sessions = sessions[:limit]
|
|
}
|
|
}
|
|
|
|
w.Header().Set("Content-Type", "application/json")
|
|
json.NewEncoder(w).Encode(sessions)
|
|
}
|
|
|
|
// HandleCreateSession handles POST /api/ai/sessions - create session
|
|
func (h *AIHandler) HandleCreateSession(w http.ResponseWriter, r *http.Request) {
|
|
ctx := r.Context()
|
|
if !h.IsRunning(ctx) {
|
|
http.Error(w, "Pulse Assistant is not running", http.StatusServiceUnavailable)
|
|
return
|
|
}
|
|
|
|
svc := h.GetService(ctx)
|
|
if svc == nil {
|
|
http.Error(w, "Pulse Assistant service not available", http.StatusServiceUnavailable)
|
|
return
|
|
}
|
|
|
|
session, err := svc.CreateSession(ctx)
|
|
if err != nil {
|
|
http.Error(w, sanitizeErrorForClient(err, "Internal server error"), http.StatusInternalServerError)
|
|
return
|
|
}
|
|
|
|
w.Header().Set("Content-Type", "application/json")
|
|
json.NewEncoder(w).Encode(session)
|
|
}
|
|
|
|
// HandleDeleteSession handles DELETE /api/ai/sessions/{id}
|
|
func (h *AIHandler) HandleDeleteSession(w http.ResponseWriter, r *http.Request, sessionID string) {
|
|
ctx := r.Context()
|
|
if !h.IsRunning(ctx) {
|
|
http.Error(w, "Pulse Assistant is not running", http.StatusServiceUnavailable)
|
|
return
|
|
}
|
|
|
|
svc := h.GetService(ctx)
|
|
if svc == nil {
|
|
http.Error(w, "Pulse Assistant service not available", http.StatusServiceUnavailable)
|
|
return
|
|
}
|
|
|
|
if err := svc.DeleteSession(ctx, sessionID); err != nil {
|
|
http.Error(w, sanitizeErrorForClient(err, "Internal server error"), http.StatusInternalServerError)
|
|
return
|
|
}
|
|
|
|
w.WriteHeader(http.StatusNoContent)
|
|
}
|
|
|
|
// HandleMessages handles GET /api/ai/sessions/{id}/messages
|
|
func (h *AIHandler) HandleMessages(w http.ResponseWriter, r *http.Request, sessionID string) {
|
|
ctx := r.Context()
|
|
if !h.IsRunning(ctx) {
|
|
http.Error(w, "Pulse Assistant is not running", http.StatusServiceUnavailable)
|
|
return
|
|
}
|
|
|
|
svc := h.GetService(ctx)
|
|
if svc == nil {
|
|
http.Error(w, "Pulse Assistant service not available", http.StatusServiceUnavailable)
|
|
return
|
|
}
|
|
|
|
messages, err := svc.GetMessages(ctx, sessionID)
|
|
if err != nil {
|
|
http.Error(w, sanitizeErrorForClient(err, "Internal server error"), http.StatusInternalServerError)
|
|
return
|
|
}
|
|
|
|
// Optional limit parameter — returns the LAST N messages (most recent).
|
|
// Used by relay proxy clients with body size constraints.
|
|
if limitStr := r.URL.Query().Get("limit"); limitStr != "" {
|
|
if limit, err := strconv.Atoi(limitStr); err == nil && limit > 0 && limit < len(messages) {
|
|
messages = messages[len(messages)-limit:]
|
|
}
|
|
}
|
|
for i := range messages {
|
|
messages[i] = messages[i].NormalizeCollections()
|
|
}
|
|
|
|
w.Header().Set("Content-Type", "application/json")
|
|
json.NewEncoder(w).Encode(messages)
|
|
}
|
|
|
|
// HandleAbort handles POST /api/ai/sessions/{id}/abort
|
|
func (h *AIHandler) HandleAbort(w http.ResponseWriter, r *http.Request, sessionID string) {
|
|
ctx := r.Context()
|
|
if !h.IsRunning(ctx) {
|
|
http.Error(w, "Pulse Assistant is not running", http.StatusServiceUnavailable)
|
|
return
|
|
}
|
|
|
|
svc := h.GetService(ctx)
|
|
if svc == nil {
|
|
http.Error(w, "Pulse Assistant service not available", http.StatusServiceUnavailable)
|
|
return
|
|
}
|
|
|
|
if err := svc.AbortSession(ctx, sessionID); err != nil {
|
|
http.Error(w, sanitizeErrorForClient(err, "Internal server error"), http.StatusInternalServerError)
|
|
return
|
|
}
|
|
|
|
w.WriteHeader(http.StatusOK)
|
|
}
|
|
|
|
// HandleStatus handles GET /api/ai/status
|
|
func (h *AIHandler) HandleStatus(w http.ResponseWriter, r *http.Request) {
|
|
status := map[string]interface{}{
|
|
"running": h.IsRunning(r.Context()),
|
|
"engine": "direct",
|
|
}
|
|
|
|
w.Header().Set("Content-Type", "application/json")
|
|
json.NewEncoder(w).Encode(status)
|
|
}
|
|
|
|
// HandleSummarize handles POST /api/ai/sessions/{id}/summarize
|
|
// Compresses context when nearing model limits
|
|
func (h *AIHandler) HandleSummarize(w http.ResponseWriter, r *http.Request, sessionID string) {
|
|
ctx := r.Context()
|
|
if !h.IsRunning(ctx) {
|
|
http.Error(w, "Pulse Assistant is not running", http.StatusServiceUnavailable)
|
|
return
|
|
}
|
|
|
|
svc := h.GetService(ctx)
|
|
if svc == nil {
|
|
http.Error(w, "Pulse Assistant service not available", http.StatusServiceUnavailable)
|
|
return
|
|
}
|
|
|
|
result, err := svc.SummarizeSession(ctx, sessionID)
|
|
if err != nil {
|
|
http.Error(w, sanitizeErrorForClient(err, "Internal server error"), http.StatusInternalServerError)
|
|
return
|
|
}
|
|
|
|
w.Header().Set("Content-Type", "application/json")
|
|
json.NewEncoder(w).Encode(result)
|
|
}
|
|
|
|
// HandleDiff handles GET /api/ai/sessions/{id}/diff
|
|
// Returns file changes made during the session
|
|
func (h *AIHandler) HandleDiff(w http.ResponseWriter, r *http.Request, sessionID string) {
|
|
ctx := r.Context()
|
|
if !h.IsRunning(ctx) {
|
|
http.Error(w, "Pulse Assistant is not running", http.StatusServiceUnavailable)
|
|
return
|
|
}
|
|
|
|
svc := h.GetService(ctx)
|
|
if svc == nil {
|
|
http.Error(w, "Pulse Assistant service not available", http.StatusServiceUnavailable)
|
|
return
|
|
}
|
|
|
|
diff, err := svc.GetSessionDiff(ctx, sessionID)
|
|
if err != nil {
|
|
http.Error(w, sanitizeErrorForClient(err, "Internal server error"), http.StatusInternalServerError)
|
|
return
|
|
}
|
|
|
|
w.Header().Set("Content-Type", "application/json")
|
|
json.NewEncoder(w).Encode(diff)
|
|
}
|
|
|
|
// HandleFork handles POST /api/ai/sessions/{id}/fork
|
|
// Creates a branch point in the conversation
|
|
func (h *AIHandler) HandleFork(w http.ResponseWriter, r *http.Request, sessionID string) {
|
|
ctx := r.Context()
|
|
if !h.IsRunning(ctx) {
|
|
http.Error(w, "Pulse Assistant is not running", http.StatusServiceUnavailable)
|
|
return
|
|
}
|
|
|
|
svc := h.GetService(ctx)
|
|
if svc == nil {
|
|
http.Error(w, "Pulse Assistant service not available", http.StatusServiceUnavailable)
|
|
return
|
|
}
|
|
|
|
session, err := svc.ForkSession(ctx, sessionID)
|
|
if err != nil {
|
|
http.Error(w, sanitizeErrorForClient(err, "Internal server error"), http.StatusInternalServerError)
|
|
return
|
|
}
|
|
|
|
w.Header().Set("Content-Type", "application/json")
|
|
json.NewEncoder(w).Encode(session)
|
|
}
|
|
|
|
// HandleRevert handles POST /api/ai/sessions/{id}/revert
|
|
// Reverts file changes from the session
|
|
func (h *AIHandler) HandleRevert(w http.ResponseWriter, r *http.Request, sessionID string) {
|
|
ctx := r.Context()
|
|
if !h.IsRunning(ctx) {
|
|
http.Error(w, "Pulse Assistant is not running", http.StatusServiceUnavailable)
|
|
return
|
|
}
|
|
|
|
svc := h.GetService(ctx)
|
|
if svc == nil {
|
|
http.Error(w, "Pulse Assistant service not available", http.StatusServiceUnavailable)
|
|
return
|
|
}
|
|
|
|
result, err := svc.RevertSession(ctx, sessionID)
|
|
if err != nil {
|
|
http.Error(w, sanitizeErrorForClient(err, "Internal server error"), http.StatusInternalServerError)
|
|
return
|
|
}
|
|
|
|
w.Header().Set("Content-Type", "application/json")
|
|
json.NewEncoder(w).Encode(result)
|
|
}
|
|
|
|
// HandleUnrevert handles POST /api/ai/sessions/{id}/unrevert
|
|
// Restores previously reverted changes
|
|
func (h *AIHandler) HandleUnrevert(w http.ResponseWriter, r *http.Request, sessionID string) {
|
|
ctx := r.Context()
|
|
if !h.IsRunning(ctx) {
|
|
http.Error(w, "Pulse Assistant is not running", http.StatusServiceUnavailable)
|
|
return
|
|
}
|
|
|
|
svc := h.GetService(ctx)
|
|
if svc == nil {
|
|
http.Error(w, "Pulse Assistant service not available", http.StatusServiceUnavailable)
|
|
return
|
|
}
|
|
|
|
result, err := svc.UnrevertSession(ctx, sessionID)
|
|
if err != nil {
|
|
http.Error(w, sanitizeErrorForClient(err, "Internal server error"), http.StatusInternalServerError)
|
|
return
|
|
}
|
|
|
|
w.Header().Set("Content-Type", "application/json")
|
|
json.NewEncoder(w).Encode(result)
|
|
}
|
|
|
|
// AnswerQuestionRequest represents a request to answer a question
type AnswerQuestionRequest struct {
	// Answers holds one entry per question field being answered.
	Answers []struct {
		ID    string `json:"id"`    // identifier of the individual question/field
		Value string `json:"value"` // the user-supplied answer
	} `json:"answers"`
}
|
|
|
|
// HandleAnswerQuestion handles POST /api/ai/question/{questionID}/answer
|
|
func (h *AIHandler) HandleAnswerQuestion(w http.ResponseWriter, r *http.Request, questionID string) {
|
|
ctx := r.Context()
|
|
if !h.IsRunning(ctx) {
|
|
http.Error(w, "Pulse Assistant is not running", http.StatusServiceUnavailable)
|
|
return
|
|
}
|
|
|
|
var req AnswerQuestionRequest
|
|
if err := json.NewDecoder(r.Body).Decode(&req); err != nil {
|
|
http.Error(w, "Invalid request body", http.StatusBadRequest)
|
|
return
|
|
}
|
|
|
|
// Convert to chat.QuestionAnswer
|
|
answers := make([]chat.QuestionAnswer, len(req.Answers))
|
|
for i, a := range req.Answers {
|
|
answers[i] = chat.QuestionAnswer{
|
|
ID: a.ID,
|
|
Value: a.Value,
|
|
}
|
|
}
|
|
|
|
log.Info().
|
|
Str("questionID", questionID).
|
|
Int("answers_count", len(answers)).
|
|
Msg("AIHandler: Received answer to question")
|
|
|
|
svc := h.GetService(ctx)
|
|
if svc == nil {
|
|
http.Error(w, "Pulse Assistant service not available", http.StatusServiceUnavailable)
|
|
return
|
|
}
|
|
|
|
if err := svc.AnswerQuestion(ctx, questionID, answers); err != nil {
|
|
log.Error().Err(err).Str("questionID", questionID).Msg("Failed to answer question")
|
|
http.Error(w, sanitizeErrorForClient(err, "Internal server error"), http.StatusInternalServerError)
|
|
return
|
|
}
|
|
|
|
w.WriteHeader(http.StatusOK)
|
|
}
|
|
|
|
// SetAlertProvider sets the alert provider for MCP tools
|
|
func (h *AIHandler) SetAlertProvider(provider chat.MCPAlertProvider) {
|
|
if svc := h.getDefaultService(); svc != nil {
|
|
svc.SetAlertProvider(provider)
|
|
}
|
|
}
|
|
|
|
// SetFindingsProvider sets the findings provider for MCP tools
|
|
func (h *AIHandler) SetFindingsProvider(provider chat.MCPFindingsProvider) {
|
|
if svc := h.getDefaultService(); svc != nil {
|
|
svc.SetFindingsProvider(provider)
|
|
}
|
|
}
|
|
|
|
// SetBaselineProvider sets the baseline provider for MCP tools
|
|
func (h *AIHandler) SetBaselineProvider(provider chat.MCPBaselineProvider) {
|
|
if svc := h.getDefaultService(); svc != nil {
|
|
svc.SetBaselineProvider(provider)
|
|
}
|
|
}
|
|
|
|
// SetPatternProvider sets the pattern provider for MCP tools
|
|
func (h *AIHandler) SetPatternProvider(provider chat.MCPPatternProvider) {
|
|
if svc := h.getDefaultService(); svc != nil {
|
|
svc.SetPatternProvider(provider)
|
|
}
|
|
}
|
|
|
|
// SetMetricsHistory sets the metrics history provider for MCP tools
|
|
func (h *AIHandler) SetMetricsHistory(provider chat.MCPMetricsHistoryProvider) {
|
|
if svc := h.getDefaultService(); svc != nil {
|
|
svc.SetMetricsHistory(provider)
|
|
}
|
|
}
|
|
|
|
// SetAgentProfileManager sets the agent profile manager for MCP tools
|
|
func (h *AIHandler) SetAgentProfileManager(manager chat.AgentProfileManager) {
|
|
if svc := h.getDefaultService(); svc != nil {
|
|
svc.SetAgentProfileManager(manager)
|
|
}
|
|
}
|
|
|
|
// SetGuestConfigProvider sets the guest config provider for MCP tools
|
|
func (h *AIHandler) SetGuestConfigProvider(provider chat.MCPGuestConfigProvider) {
|
|
if svc := h.getDefaultService(); svc != nil {
|
|
svc.SetGuestConfigProvider(provider)
|
|
}
|
|
}
|
|
|
|
// SetAppContainerConfigProvider sets the native app-container config provider for MCP tools
|
|
func (h *AIHandler) SetAppContainerConfigProvider(provider chat.MCPAppContainerConfigProvider) {
|
|
if svc := h.getDefaultService(); svc != nil {
|
|
svc.SetAppContainerConfigProvider(provider)
|
|
}
|
|
}
|
|
|
|
// SetBackupProvider sets the backup provider for MCP tools
|
|
func (h *AIHandler) SetBackupProvider(provider chat.MCPBackupProvider) {
|
|
if svc := h.getDefaultService(); svc != nil {
|
|
svc.SetBackupProvider(provider)
|
|
}
|
|
}
|
|
|
|
// SetDiskHealthProvider sets the disk health provider for MCP tools
|
|
func (h *AIHandler) SetDiskHealthProvider(provider chat.MCPDiskHealthProvider) {
|
|
if svc := h.getDefaultService(); svc != nil {
|
|
svc.SetDiskHealthProvider(provider)
|
|
}
|
|
}
|
|
|
|
// SetUpdatesProvider sets the updates provider for MCP tools
|
|
func (h *AIHandler) SetUpdatesProvider(provider chat.MCPUpdatesProvider) {
|
|
if svc := h.getDefaultService(); svc != nil {
|
|
svc.SetUpdatesProvider(provider)
|
|
}
|
|
}
|
|
|
|
// SetFindingsManager sets the findings manager for MCP tools
|
|
func (h *AIHandler) SetFindingsManager(manager chat.FindingsManager) {
|
|
if svc := h.getDefaultService(); svc != nil {
|
|
svc.SetFindingsManager(manager)
|
|
}
|
|
}
|
|
|
|
// SetMetadataUpdater sets the metadata updater for MCP tools
|
|
func (h *AIHandler) SetMetadataUpdater(updater chat.MetadataUpdater) {
|
|
if svc := h.getDefaultService(); svc != nil {
|
|
svc.SetMetadataUpdater(updater)
|
|
}
|
|
}
|
|
|
|
// SetUnifiedResourceProvider sets the unified resource provider for MCP tools
|
|
func (h *AIHandler) SetUnifiedResourceProvider(provider chat.MCPUnifiedResourceProvider) {
|
|
if svc := h.getDefaultService(); svc != nil {
|
|
svc.SetUnifiedResourceProvider(provider)
|
|
}
|
|
}
|
|
|
|
// UpdateControlSettings updates control settings in the service
|
|
func (h *AIHandler) UpdateControlSettings(cfg *config.AIConfig) {
|
|
if svc := h.getDefaultService(); svc != nil {
|
|
svc.UpdateControlSettings(cfg)
|
|
}
|
|
}
|