Normalize hosted quickstart aliases at the contract boundary

This commit is contained in:
rcourtman 2026-04-03 22:13:40 +01:00
parent ac4872c8d6
commit 2dade3ec8d
11 changed files with 274 additions and 15 deletions

View file

@ -141,6 +141,11 @@ management, and fleet control surfaces.
`quickstart:pulse-hosted` alias in AI settings payloads, but they must not
bake vendor model IDs or provider-model fallback rules into install or
activation flows just because those routes share the backend API tree.
Persisted legacy hosted quickstart model IDs are therefore not lifecycle
truth either: when shared settings helpers load or save historical
quickstart values, they must normalize them back to
`quickstart:pulse-hosted` before adjacent install or activation flows read
the payload.
The machine-scoped quickstart authority must stay canonical too:
tenant-local lifecycle routes may reuse shared installation activation or
effective entitlement billing state, but they must not fork per-org

View file

@ -244,6 +244,13 @@ the request runs under a hosted tenant org with no org-local billing lease,
the same AI runtime path must inherit the default hosted lease for bootstrap
and quickstart-credit reads so tenant-scoped Chat, Patrol, and AI Settings
stay aligned with the machine-owned hosted entitlement state.
That same hosted and self-hosted settings boundary must also normalize legacy
hosted quickstart model aliases on read and write. Persisted values such as
`quickstart:minimax-2.5m` are historical implementation details, not governed
runtime truth, so `internal/config/ai.go`,
`internal/config/persistence.go`, and `internal/api/ai_handlers.go` must
rewrite them to `quickstart:pulse-hosted` before the runtime, API payloads,
or structured logs consume those fields.
That same runtime boundary also owns approval-store lifecycle in
`internal/api/ai_handler.go`. Settings-driven enablement and restart must be
able to cold-start the direct AI runtime, initialize approval persistence, and

View file

@ -244,6 +244,11 @@ Own canonical runtime payload shapes between backend and frontend.
activation/trial identity must surface as the canonical activation-required
quickstart block reason for Patrol and AI settings enablement rather than
silently attempting anonymous bootstrap
and the Pulse-owned hosted model alias rule, so persisted legacy hosted
quickstart model IDs such as `quickstart:minimax-2.5m` are rewritten to
`quickstart:pulse-hosted` before `/api/settings/ai` responds, instead of
leaking stale vendor identifiers back into the governed payload contract
for model, chat, patrol, discovery, or auto-fix fields
and the AI settings blocked-reason contract, so `/api/settings/ai` must
expose `quickstart_blocked_reason` when quickstart cannot currently enable
Patrol and must clear that field when a provider-backed path is active or

View file

@ -218,6 +218,12 @@ querying, and the operator-facing storage health presentation layer.
unified-resource metrics-target IDs, and the storage page must reuse the
shared sticky summary primitive instead of a storage-local scroll wrapper.
13. Keep storage summary interaction scoped through the same canonical IDs.
14. Keep adjacent AI settings persistence vendor-neutral on the shared
`internal/api/` boundary. When storage- or recovery-adjacent hosted flows
load or save AI settings through shared helpers, any historical hosted
quickstart model IDs must be normalized back to the governed alias
`quickstart:pulse-hosted` before adjacent surfaces read or re-emit that
state.
When operators hover or focus pools versus physical disks, the storage
summary must reuse one resolved active-series ID across card state and
chart highlighting so pool-only cards demote cleanly during disk focus and
@ -1816,6 +1822,12 @@ explicit AI config exists. Adjacent recovery surfaces must not invent their
own "AI disabled until configured" fallback or synthetic activation state when
the hosted runtime already has enough entitlement proof to bootstrap the
machine-owned default.
That same shared persistence path must also rewrite historical hosted
quickstart model IDs to the Pulse-owned alias before adjacent recovery or
storage flows read AI settings state. Support and recovery surfaces may
observe `quickstart:pulse-hosted`, but they must not inherit or re-emit stale
vendor IDs from old `ai.enc` payloads just because the shared settings helper
touched persistence on the way through.
That same shared settings helper layer must then preserve canonical
org-management privilege for non-default tenant requests. Storage- and
recovery-adjacent hosted flows that reuse settings-bound helpers must allow

View file

@ -2350,6 +2350,7 @@ func (h *AISettingsHandler) HandleGetAISettings(w http.ResponseWriter, r *http.R
if settings == nil {
settings = config.NewDefaultAIConfig()
}
settings.NormalizeQuickstartModelAliases()
if aiSettingsRequireModelResolution(settings) {
if resolvedModel, resolveErr := ai.ResolveConfiguredModel(ctx, settings); resolveErr == nil {
settings.Model = resolvedModel
@ -2369,9 +2370,9 @@ func (h *AISettingsHandler) HandleGetAISettings(w http.ResponseWriter, r *http.R
response := AISettingsResponse{
Enabled: settings.Enabled || isDemo,
Model: settings.GetModel(),
ChatModel: settings.ChatModel,
PatrolModel: settings.PatrolModel,
AutoFixModel: settings.AutoFixModel,
ChatModel: config.NormalizeQuickstartModelString(settings.ChatModel),
PatrolModel: config.NormalizeQuickstartModelString(settings.PatrolModel),
AutoFixModel: config.NormalizeQuickstartModelString(settings.AutoFixModel),
Configured: settings.IsConfigured() || isDemo,
CustomContext: settings.CustomContext,
AuthMethod: authMethod,
@ -2748,6 +2749,7 @@ func (h *AISettingsHandler) HandleUpdateAISettings(w http.ResponseWriter, r *htt
}
settings.Model = resolvedModel
}
settings.NormalizeQuickstartModelAliases()
// Save settings
if err := h.getPersistence(r.Context()).SaveAIConfig(*settings); err != nil {
@ -2790,8 +2792,8 @@ func (h *AISettingsHandler) HandleUpdateAISettings(w http.ResponseWriter, r *htt
Bool("enabled", settings.Enabled).
Str("provider", providerName).
Str("model", settings.GetModel()).
Str("chatModel", settings.ChatModel).
Str("patrolModel", settings.PatrolModel).
Str("chatModel", config.NormalizeQuickstartModelString(settings.ChatModel)).
Str("patrolModel", config.NormalizeQuickstartModelString(settings.PatrolModel)).
Bool("alertTriggeredAnalysis", settings.AlertTriggeredAnalysis).
Msg("AI settings updated")
@ -2806,9 +2808,9 @@ func (h *AISettingsHandler) HandleUpdateAISettings(w http.ResponseWriter, r *htt
response := AISettingsResponse{
Enabled: settings.Enabled,
Model: settings.GetModel(),
ChatModel: settings.ChatModel,
PatrolModel: settings.PatrolModel,
AutoFixModel: settings.AutoFixModel,
ChatModel: config.NormalizeQuickstartModelString(settings.ChatModel),
PatrolModel: config.NormalizeQuickstartModelString(settings.PatrolModel),
AutoFixModel: config.NormalizeQuickstartModelString(settings.AutoFixModel),
Configured: settings.IsConfigured(),
CustomContext: settings.CustomContext,
AuthMethod: authMethod,

View file

@ -6,6 +6,8 @@ import (
"encoding/json"
"net/http"
"net/http/httptest"
"os"
"path/filepath"
"strings"
"testing"
"time"
@ -318,6 +320,40 @@ func TestAISettingsHandler_GetHostedTenantSettings_InheritsDefaultHostedBillingS
assert.True(t, persistence.HasAIConfig())
}
// Seeds persistence with legacy hosted quickstart vendor model IDs and
// verifies GET /api/settings/ai responds with the Pulse-owned alias in
// every model field.
func TestAISettingsHandler_GetSettings_NormalizesLegacyQuickstartAliases(t *testing.T) {
	dataDir := t.TempDir()
	mtp := config.NewMultiTenantPersistence(dataDir)
	persistence, err := mtp.GetPersistence("default")
	require.NoError(t, err)

	// Write a raw ai.enc payload carrying the legacy vendor alias in every slot.
	legacyPayload, err := json.Marshal(map[string]any{
		"enabled":        true,
		"model":          "quickstart:minimax-2.5m",
		"chat_model":     "quickstart:minimax-2.5m",
		"patrol_model":   "quickstart:minimax-2.5m",
		"auto_fix_model": "quickstart:minimax-2.5m",
	})
	require.NoError(t, err)
	require.NoError(t, os.WriteFile(filepath.Join(persistence.DataDir(), "ai.enc"), legacyPayload, 0o600))

	handler := NewAISettingsHandler(mtp, nil, nil)
	handler.defaultConfig = &config.Config{DataPath: dataDir}

	rec := httptest.NewRecorder()
	handler.HandleGetAISettings(rec, httptest.NewRequest(http.MethodGet, "/api/settings/ai", nil))
	require.Equal(t, http.StatusOK, rec.Code, "body=%s", rec.Body.String())

	var resp AISettingsResponse
	require.NoError(t, json.Unmarshal(rec.Body.Bytes(), &resp))

	canonical := config.DefaultModelForProvider(config.AIProviderQuickstart)
	assert.Equal(t, canonical, resp.Model)
	assert.Equal(t, canonical, resp.ChatModel)
	assert.Equal(t, canonical, resp.PatrolModel)
	assert.Equal(t, canonical, resp.AutoFixModel)
}
func TestAISettingsHandler_GetSettings_QuickstartActivationRequiredSurface(t *testing.T) {
tmp := t.TempDir()
cfg := &config.Config{DataPath: tmp}

View file

@ -2204,6 +2204,76 @@ func TestContract_HostedAISettingsAutoBootstrapJSONSnapshot(t *testing.T) {
assertJSONSnapshot(t, rec.Body.Bytes(), want)
}
// TestContract_AISettingsLegacyQuickstartAliasJSONSnapshot pins the
// /api/settings/ai payload contract when persisted settings carry legacy
// hosted quickstart vendor model IDs: every model field must come back as
// the Pulse-owned alias, and the vendor ID must not appear anywhere in the
// response body.
func TestContract_AISettingsLegacyQuickstartAliasJSONSnapshot(t *testing.T) {
	tmp := t.TempDir()
	cfg := &config.Config{DataPath: tmp}
	persistence := config.NewConfigPersistence(tmp)

	// Seed persistence with the legacy vendor alias in every model slot.
	aiCfg := config.NewDefaultAIConfig()
	aiCfg.Enabled = true
	aiCfg.Model = "quickstart:minimax-2.5m"
	aiCfg.ChatModel = "quickstart:minimax-2.5m"
	aiCfg.PatrolModel = "quickstart:minimax-2.5m"
	aiCfg.DiscoveryModel = "quickstart:minimax-2.5m"
	aiCfg.AutoFixModel = "quickstart:minimax-2.5m"
	if err := persistence.SaveAIConfig(*aiCfg); err != nil {
		t.Fatalf("SaveAIConfig: %v", err)
	}

	handler := newTestAISettingsHandler(cfg, persistence, nil)
	req := httptest.NewRequest(http.MethodGet, "/api/settings/ai", nil)
	rec := httptest.NewRecorder()
	handler.HandleGetAISettings(rec, req)
	if rec.Code != http.StatusOK {
		t.Fatalf("status=%d, want %d: %s", rec.Code, http.StatusOK, rec.Body.String())
	}

	// Full-body snapshot: any added, renamed, or re-typed response field must
	// be reviewed against this contract explicitly.
	const want = `{
"enabled":true,
"model":"quickstart:pulse-hosted",
"chat_model":"quickstart:pulse-hosted",
"patrol_model":"quickstart:pulse-hosted",
"auto_fix_model":"quickstart:pulse-hosted",
"configured":false,
"custom_context":"",
"auth_method":"api_key",
"oauth_connected":false,
"patrol_interval_minutes":360,
"patrol_enabled":true,
"patrol_auto_fix":false,
"alert_triggered_analysis":true,
"patrol_event_triggers_enabled":true,
"patrol_alert_triggers_enabled":true,
"patrol_anomaly_triggers_enabled":true,
"use_proactive_thresholds":false,
"available_models":[],
"anthropic_configured":false,
"openai_configured":false,
"openrouter_configured":false,
"deepseek_configured":false,
"gemini_configured":false,
"ollama_configured":false,
"ollama_base_url":"http://localhost:11434",
"ollama_password_set":false,
"configured_providers":[],
"control_level":"read_only",
"protected_guests":[],
"discovery_enabled":false,
"quickstart_credits_total":0,
"quickstart_credits_used":0,
"quickstart_credits_remaining":0,
"quickstart_credits_available":false,
"using_quickstart":false
}`
	assertJSONSnapshot(t, rec.Body.Bytes(), want)

	// Belt-and-braces: the legacy vendor ID must not leak into any part of
	// the payload, not just the snapshot-compared fields above.
	if bytes.Contains(rec.Body.Bytes(), []byte("quickstart:minimax-2.5m")) {
		t.Fatalf("expected AI settings payload to suppress legacy hosted quickstart aliases, got %s", rec.Body.Bytes())
	}
}
func TestContract_AISettingsOllamaAuthJSONSnapshot(t *testing.T) {
tmp := t.TempDir()
cfg := &config.Config{DataPath: tmp}

View file

@ -325,6 +325,23 @@ func FormatModelString(provider, modelName string) string {
return provider + ":" + modelName
}
// NormalizeQuickstartModelString canonicalizes legacy quickstart model strings to
// Pulse's owned hosted alias. The server chooses the real upstream vendor model,
// so persisted vendor-specific quickstart IDs are rewritten rather than trusted.
func NormalizeQuickstartModelString(model string) string {
	trimmed := strings.TrimSpace(model)
	if trimmed == "" {
		return ""
	}
	// Bare alias forms ("pulse-hosted" / "quickstart") map straight to the
	// canonical provider default.
	if strings.EqualFold(trimmed, DefaultAIModelQuickstart) || strings.EqualFold(trimmed, AIProviderQuickstart) {
		return DefaultModelForProvider(AIProviderQuickstart)
	}
	// Any "quickstart:<vendor-model>" string collapses to the owned alias too.
	if provider, _ := ParseModelString(trimmed); provider == AIProviderQuickstart {
		return DefaultModelForProvider(AIProviderQuickstart)
	}
	return trimmed
}
// DefaultModelForProvider returns the default "provider:model" string for a given provider name.
// Returns empty string if the provider is unknown.
func DefaultModelForProvider(provider string) string {
@ -338,7 +355,36 @@ func DefaultModelForProvider(provider string) string {
// GetModel returns the explicitly configured model, if any, with legacy
// quickstart vendor aliases normalized to the Pulse-owned hosted alias.
// Safe to call on a nil receiver.
func (c *AIConfig) GetModel() string {
	// The stale pre-normalization `return strings.TrimSpace(c.Model)` made the
	// nil guard and alias normalization below unreachable; it has been removed.
	if c == nil {
		return ""
	}
	return NormalizeQuickstartModelString(c.Model)
}
// NormalizeQuickstartModelAliases rewrites any legacy quickstart model strings in-place
// to the owned Pulse alias. Returns true when a field changed.
func (c *AIConfig) NormalizeQuickstartModelAliases() bool {
	if c == nil {
		return false
	}
	changed := false
	for _, field := range []*string{&c.Model, &c.ChatModel, &c.PatrolModel, &c.DiscoveryModel, &c.AutoFixModel} {
		// Only treat it as a change when the trimmed value actually differs
		// after normalization; whitespace-only differences are left alone.
		if normalized := NormalizeQuickstartModelString(*field); normalized != strings.TrimSpace(*field) {
			*field = normalized
			changed = true
		}
	}
	return changed
}
// GetPreferredModelForProvider returns the most relevant configured model for a provider.
@ -346,7 +392,7 @@ func (c *AIConfig) GetModel() string {
// provider-owned quickstart alias when applicable.
func (c *AIConfig) GetPreferredModelForProvider(provider string) string {
for _, candidate := range []string{c.Model, c.ChatModel, c.PatrolModel, c.AutoFixModel, c.DiscoveryModel} {
candidate = strings.TrimSpace(candidate)
candidate = NormalizeQuickstartModelString(candidate)
if candidate == "" {
continue
}
@ -366,7 +412,7 @@ func (c *AIConfig) GetPreferredModelForProvider(provider string) string {
// Falls back to the main Model if ChatModel is not set; legacy hosted
// quickstart vendor IDs are normalized to the Pulse-owned alias on the way out.
func (c *AIConfig) GetChatModel() string {
	if c.ChatModel != "" {
		// The stale duplicate `return c.ChatModel` (a diff leftover that made
		// normalization unreachable) has been removed.
		return NormalizeQuickstartModelString(c.ChatModel)
	}
	return c.GetModel()
}
@ -375,7 +421,7 @@ func (c *AIConfig) GetChatModel() string {
// Falls back to the main Model if PatrolModel is not set; legacy hosted
// quickstart vendor IDs are normalized to the Pulse-owned alias on the way out.
func (c *AIConfig) GetPatrolModel() string {
	if c.PatrolModel != "" {
		// The stale duplicate `return c.PatrolModel` (a diff leftover that made
		// normalization unreachable) has been removed.
		return NormalizeQuickstartModelString(c.PatrolModel)
	}
	return c.GetModel()
}
@ -384,7 +430,7 @@ func (c *AIConfig) GetPatrolModel() string {
// Falls back to the main model since discovery needs to use the same provider;
// legacy hosted quickstart vendor IDs are normalized to the Pulse-owned alias.
func (c *AIConfig) GetDiscoveryModel() string {
	if c.DiscoveryModel != "" {
		// The stale duplicate `return c.DiscoveryModel` (a diff leftover that
		// made normalization unreachable) has been removed.
		return NormalizeQuickstartModelString(c.DiscoveryModel)
	}
	// Fall back to the main model to ensure we use the same provider
	return c.GetModel()
@ -395,7 +441,7 @@ func (c *AIConfig) GetDiscoveryModel() string {
// Auto-fix may warrant a more capable model since it takes actions; legacy
// hosted quickstart vendor IDs are normalized to the Pulse-owned alias.
func (c *AIConfig) GetAutoFixModel() string {
	if c.AutoFixModel != "" {
		// The stale duplicate `return c.AutoFixModel` (a diff leftover that
		// made normalization unreachable) has been removed.
		return NormalizeQuickstartModelString(c.AutoFixModel)
	}
	return c.GetPatrolModel()
}

View file

@ -455,6 +455,11 @@ func TestAIConfig_GetModel(t *testing.T) {
config: AIConfig{},
expected: "",
},
{
name: "legacy quickstart model normalizes to owned alias",
config: AIConfig{Model: "quickstart:minimax-2.5m"},
expected: DefaultModelForProvider(AIProviderQuickstart),
},
}
for _, tt := range tests {
@ -486,6 +491,39 @@ func TestAIConfig_GetChatModel(t *testing.T) {
t.Errorf("GetChatModel() = %q, want 'main-model'", result)
}
})
t.Run("normalizes legacy quickstart chat model", func(t *testing.T) {
config := AIConfig{
Model: "openai:gpt-4o-mini",
ChatModel: "quickstart:minimax-2.5m",
}
if result := config.GetChatModel(); result != DefaultModelForProvider(AIProviderQuickstart) {
t.Errorf("GetChatModel() = %q, want %q", result, DefaultModelForProvider(AIProviderQuickstart))
}
})
}
// Verifies in-place normalization: every legacy quickstart form collapses to
// the owned alias, empty fields stay empty, and a change is reported.
func TestAIConfig_NormalizeQuickstartModelAliases(t *testing.T) {
	cfg := AIConfig{
		Model:          "quickstart:minimax-2.5m",
		ChatModel:      "pulse-hosted",
		PatrolModel:    "quickstart:anything",
		DiscoveryModel: "",
		AutoFixModel:   "quickstart:legacy-provider-model",
	}
	if changed := cfg.NormalizeQuickstartModelAliases(); !changed {
		t.Fatal("expected quickstart alias normalization to report a change")
	}
	want := DefaultModelForProvider(AIProviderQuickstart)
	allNormalized := cfg.Model == want &&
		cfg.ChatModel == want &&
		cfg.PatrolModel == want &&
		cfg.AutoFixModel == want
	if !allNormalized {
		t.Fatalf("NormalizeQuickstartModelAliases() = %#v, want all quickstart fields normalized to %q", cfg, want)
	}
	if cfg.DiscoveryModel != "" {
		t.Fatalf("expected empty discovery model to remain empty, got %q", cfg.DiscoveryModel)
	}
}
func TestAIConfig_GetPreferredModelForProvider(t *testing.T) {

View file

@ -1933,6 +1933,7 @@ func (c *ConfigPersistence) SaveAIConfig(settings AIConfig) error {
defer c.mu.Unlock()
settings.NormalizePatrolEventTriggerSettings()
settings.NormalizeQuickstartModelAliases()
if err := c.EnsureConfigDir(); err != nil {
return fmt.Errorf("prepare config directory for ai config: %w", err)
@ -2037,8 +2038,9 @@ func (c *ConfigPersistence) LoadAIConfig() (*AIConfig, error) {
settings.ControlLevel = ControlLevelControlled
migratedControlLevel = true
}
migratedQuickstartAliases := settings.NormalizeQuickstartModelAliases()
if migratedPlaintext || migratedLegacyFields || migratedControlLevel || migratedPatrolTriggerFields {
if migratedPlaintext || migratedLegacyFields || migratedControlLevel || migratedPatrolTriggerFields || migratedQuickstartAliases {
jsonData, err := json.Marshal(*settings)
if err != nil {
return nil, fmt.Errorf("marshal ai config migration rewrite: %w", err)
@ -2050,6 +2052,7 @@ func (c *ConfigPersistence) LoadAIConfig() (*AIConfig, error) {
Str("control_level", settings.ControlLevel).
Bool("legacy_fields_migrated", migratedLegacyFields).
Bool("patrol_trigger_fields_migrated", migratedPatrolTriggerFields).
Bool("quickstart_aliases_migrated", migratedQuickstartAliases).
Bool("plaintext_migrated", migratedPlaintext).
Msg("Migrated AI configuration")
}

View file

@ -69,6 +69,41 @@ func TestLoadAIConfig_Branches(t *testing.T) {
assert.NotContains(t, string(rewritten), "secret-key")
}
// TestLoadAIConfig_MigratesLegacyQuickstartAlias verifies that loading an
// ai.enc payload carrying legacy hosted quickstart vendor model IDs rewrites
// every model field to the Pulse-owned alias and persists the migrated
// payload back to disk.
func TestLoadAIConfig_MigratesLegacyQuickstartAlias(t *testing.T) {
	tempDir := t.TempDir()
	cp := NewConfigPersistence(tempDir)

	aiFile := filepath.Join(tempDir, "ai.enc")
	legacyConfig := map[string]interface{}{
		"enabled":        true,
		"model":          "quickstart:minimax-2.5m",
		"chat_model":     "quickstart:minimax-2.5m",
		"patrol_model":   "quickstart:minimax-2.5m",
		"auto_fix_model": "quickstart:minimax-2.5m",
	}
	raw, err := json.Marshal(legacyConfig)
	if err != nil {
		t.Fatalf("marshal legacy config: %v", err)
	}
	if err := os.WriteFile(aiFile, raw, 0o600); err != nil {
		t.Fatalf("WriteFile(ai.enc): %v", err)
	}

	settings, err := cp.LoadAIConfig()
	// Fail fast: with assert.NoError the test would fall through into a nil
	// dereference of settings below, panicking and masking the real failure.
	if err != nil {
		t.Fatalf("LoadAIConfig: %v", err)
	}
	want := DefaultModelForProvider(AIProviderQuickstart)
	assert.Equal(t, want, settings.Model)
	assert.Equal(t, want, settings.ChatModel)
	assert.Equal(t, want, settings.PatrolModel)
	assert.Equal(t, want, settings.AutoFixModel)

	// The migration must be rewritten to disk so the legacy alias never
	// reappears on subsequent loads.
	rewritten, err := os.ReadFile(aiFile)
	if err != nil {
		t.Fatalf("ReadFile(ai.enc): %v", err)
	}
	assert.False(t, bytes.Equal(rewritten, raw))
	assert.NotContains(t, string(rewritten), "quickstart:minimax-2.5m")
}
func TestLoadAIFindings_Branches(t *testing.T) {
tempDir := t.TempDir()
cp := NewConfigPersistence(tempDir)