# Mirror of https://github.com/Alishahryar1/free-claude-code.git
# Synced 2026-04-28 11:30:03 +00:00 — 458 lines, 17 KiB, Python
"""Tests for config/settings.py and config/nim.py"""
|
|
|
|
import pytest
|
|
from pydantic import ValidationError
|
|
|
|
from config.nim import NimSettings
|
|
|
|
|
|
class TestSettings:
    """Exercise construction, caching, and env-var loading of Settings."""

    def test_settings_loads(self):
        """Settings can be constructed with no arguments."""
        from config.settings import Settings

        cfg = Settings()
        assert cfg is not None

    def test_default_values(self):
        """Defaults exist and carry the expected Python types."""
        from config.settings import Settings

        cfg = Settings()
        assert isinstance(cfg.provider_rate_limit, int)
        assert isinstance(cfg.provider_rate_window, int)
        assert isinstance(cfg.nim.temperature, float)
        assert isinstance(cfg.fast_prefix_detection, bool)

    def test_get_settings_cached(self):
        """get_settings hands back one shared, cached instance."""
        from config.settings import get_settings

        first = get_settings()
        second = get_settings()
        # Identity check: both calls must yield the cached object.
        assert first is second

    def test_empty_string_to_none_for_optional_int(self):
        """Empty-string env input for an optional int ends up as None or int."""
        from config.settings import Settings

        # Settings should handle NVIDIA_NIM_SEED="" gracefully.
        cfg = Settings()
        assert cfg.nim.seed is None or isinstance(cfg.nim.seed, int)

    def test_model_setting(self):
        """The model field is a non-empty string."""
        from config.settings import Settings

        cfg = Settings()
        assert isinstance(cfg.model, str)
        assert len(cfg.model) > 0

    def test_base_url_constant(self):
        """NVIDIA_NIM_BASE_URL holds the expected constant URL."""
        from providers.nvidia_nim import NVIDIA_NIM_BASE_URL

        assert NVIDIA_NIM_BASE_URL == "https://integrate.api.nvidia.com/v1"

    def test_lm_studio_base_url_from_env(self, monkeypatch):
        """LM_STUDIO_BASE_URL env var flows into settings."""
        from config.settings import Settings

        monkeypatch.setenv("LM_STUDIO_BASE_URL", "http://custom:5678/v1")
        cfg = Settings()
        assert cfg.lm_studio_base_url == "http://custom:5678/v1"

    def test_provider_rate_limit_from_env(self, monkeypatch):
        """PROVIDER_RATE_LIMIT env var flows into settings as an int."""
        from config.settings import Settings

        monkeypatch.setenv("PROVIDER_RATE_LIMIT", "20")
        cfg = Settings()
        assert cfg.provider_rate_limit == 20

    def test_provider_rate_window_from_env(self, monkeypatch):
        """PROVIDER_RATE_WINDOW env var flows into settings as an int."""
        from config.settings import Settings

        monkeypatch.setenv("PROVIDER_RATE_WINDOW", "30")
        cfg = Settings()
        assert cfg.provider_rate_window == 30

    def test_http_read_timeout_from_env(self, monkeypatch):
        """HTTP_READ_TIMEOUT env var is coerced to a float setting."""
        from config.settings import Settings

        monkeypatch.setenv("HTTP_READ_TIMEOUT", "600")
        cfg = Settings()
        assert cfg.http_read_timeout == 600.0

    def test_http_write_timeout_from_env(self, monkeypatch):
        """HTTP_WRITE_TIMEOUT env var is coerced to a float setting."""
        from config.settings import Settings

        monkeypatch.setenv("HTTP_WRITE_TIMEOUT", "20")
        cfg = Settings()
        assert cfg.http_write_timeout == 20.0

    def test_http_connect_timeout_from_env(self, monkeypatch):
        """HTTP_CONNECT_TIMEOUT env var is coerced to a float setting."""
        from config.settings import Settings

        monkeypatch.setenv("HTTP_CONNECT_TIMEOUT", "5")
        cfg = Settings()
        assert cfg.http_connect_timeout == 5.0
|
|
|
# --- NimSettings Validation Tests ---
|
|
class TestNimSettingsValidBounds:
    """In-range values must construct NimSettings without error."""

    @pytest.mark.parametrize("top_k", [-1, 0, 1, 100])
    def test_top_k_valid(self, top_k):
        """Any top_k at or above -1 is accepted."""
        cfg = NimSettings(top_k=top_k)
        assert cfg.top_k == top_k

    @pytest.mark.parametrize("temp", [0.0, 0.5, 1.0, 2.0])
    def test_temperature_valid(self, temp):
        cfg = NimSettings(temperature=temp)
        assert cfg.temperature == temp

    @pytest.mark.parametrize("top_p", [0.0, 0.5, 1.0])
    def test_top_p_valid(self, top_p):
        cfg = NimSettings(top_p=top_p)
        assert cfg.top_p == top_p

    @pytest.mark.parametrize("effort", ["low", "medium", "high"])
    def test_reasoning_effort_valid(self, effort):
        cfg = NimSettings(reasoning_effort=effort)
        assert cfg.reasoning_effort == effort

    def test_max_tokens_valid(self):
        cfg = NimSettings(max_tokens=1)
        assert cfg.max_tokens == 1

    def test_min_tokens_valid(self):
        cfg = NimSettings(min_tokens=0)
        assert cfg.min_tokens == 0

    @pytest.mark.parametrize("penalty", [-2.0, 0.0, 2.0])
    def test_presence_penalty_valid(self, penalty):
        cfg = NimSettings(presence_penalty=penalty)
        assert cfg.presence_penalty == penalty

    @pytest.mark.parametrize("penalty", [-2.0, 0.0, 2.0])
    def test_frequency_penalty_valid(self, penalty):
        cfg = NimSettings(frequency_penalty=penalty)
        assert cfg.frequency_penalty == penalty

    @pytest.mark.parametrize("min_p", [0.0, 0.5, 1.0])
    def test_min_p_valid(self, min_p):
        cfg = NimSettings(min_p=min_p)
        assert cfg.min_p == min_p
|
|
|
class TestNimSettingsInvalidBounds:
    """Out-of-range values must raise ValidationError at construction."""

    @pytest.mark.parametrize("top_k", [-2, -100])
    def test_top_k_below_lower_bound(self, top_k):
        # Tightened from (ValidationError, ValueError): pydantic wraps any
        # validator ValueError into a ValidationError at model construction
        # (and ValidationError itself subclasses ValueError), so the tuple
        # was redundant and weaker than the sibling tests in this class.
        with pytest.raises(ValidationError):
            NimSettings(top_k=top_k)

    def test_temperature_negative(self):
        with pytest.raises(ValidationError):
            NimSettings(temperature=-0.1)

    @pytest.mark.parametrize("top_p", [-0.1, 1.1])
    def test_top_p_out_of_range(self, top_p):
        with pytest.raises(ValidationError):
            NimSettings(top_p=top_p)

    @pytest.mark.parametrize("penalty", [-2.1, 2.1])
    def test_presence_penalty_out_of_range(self, penalty):
        with pytest.raises(ValidationError):
            NimSettings(presence_penalty=penalty)

    @pytest.mark.parametrize("penalty", [-2.1, 2.1])
    def test_frequency_penalty_out_of_range(self, penalty):
        with pytest.raises(ValidationError):
            NimSettings(frequency_penalty=penalty)

    @pytest.mark.parametrize("min_p", [-0.1, 1.1])
    def test_min_p_out_of_range(self, min_p):
        with pytest.raises(ValidationError):
            NimSettings(min_p=min_p)

    @pytest.mark.parametrize("max_tokens", [0, -1])
    def test_max_tokens_too_low(self, max_tokens):
        with pytest.raises(ValidationError):
            NimSettings(max_tokens=max_tokens)

    def test_min_tokens_negative(self):
        with pytest.raises(ValidationError):
            NimSettings(min_tokens=-1)

    def test_reasoning_effort_invalid(self):
        from typing import Any, cast

        # cast silences the type checker; the runtime value is still invalid.
        with pytest.raises(ValidationError):
            NimSettings(reasoning_effort=cast(Any, "invalid"))
|
|
|
class TestNimSettingsValidators:
    """Cover the custom field validators declared on NimSettings."""

    @pytest.mark.parametrize(
        "seed_val,expected",
        [("", None), (None, None), ("42", 42), (42, 42)],
        ids=["empty_str", "none", "str_42", "int_42"],
    )
    def test_parse_optional_int(self, seed_val, expected):
        """seed: empty string becomes None; numeric input becomes int."""
        cfg = NimSettings(seed=seed_val)
        assert cfg.seed == expected

    @pytest.mark.parametrize(
        "stop_val,expected",
        [("", None), ("STOP", "STOP"), (None, None)],
        ids=["empty_str", "valid", "none"],
    )
    def test_parse_optional_str_stop(self, stop_val, expected):
        """stop: empty string becomes None; other values pass through."""
        cfg = NimSettings(stop=stop_val)
        assert cfg.stop == expected

    @pytest.mark.parametrize(
        "chat_template_val,expected",
        [("", None), ("template", "template")],
        ids=["empty_str", "valid"],
    )
    def test_parse_optional_str_chat_template(self, chat_template_val, expected):
        """chat_template: empty string becomes None; text passes through."""
        cfg = NimSettings(chat_template=chat_template_val)
        assert cfg.chat_template == expected

    def test_extra_forbid_rejects_unknown_field(self):
        """extra='forbid' makes unknown keyword fields a validation error."""
        from typing import Any, cast

        with pytest.raises(ValidationError):
            NimSettings(**cast(Any, {"unknown_field": "value"}))
|
|
|
class TestSettingsOptionalStr:
    """Cover the Settings parse_optional_str validator and related fields."""

    def test_empty_telegram_token_to_none(self, monkeypatch):
        """An empty TELEGRAM_BOT_TOKEN normalizes to None."""
        from config.settings import Settings

        monkeypatch.setenv("TELEGRAM_BOT_TOKEN", "")
        cfg = Settings()
        assert cfg.telegram_bot_token is None

    def test_valid_telegram_token_preserved(self, monkeypatch):
        """A non-empty TELEGRAM_BOT_TOKEN is kept verbatim."""
        from config.settings import Settings

        monkeypatch.setenv("TELEGRAM_BOT_TOKEN", "abc123")
        cfg = Settings()
        assert cfg.telegram_bot_token == "abc123"

    def test_empty_allowed_user_id_to_none(self, monkeypatch):
        """An empty ALLOWED_TELEGRAM_USER_ID normalizes to None."""
        from config.settings import Settings

        monkeypatch.setenv("ALLOWED_TELEGRAM_USER_ID", "")
        cfg = Settings()
        assert cfg.allowed_telegram_user_id is None

    def test_discord_bot_token_from_env(self, monkeypatch):
        """DISCORD_BOT_TOKEN env var flows into settings."""
        from config.settings import Settings

        monkeypatch.setenv("DISCORD_BOT_TOKEN", "discord_token_123")
        cfg = Settings()
        assert cfg.discord_bot_token == "discord_token_123"

    def test_empty_discord_bot_token_to_none(self, monkeypatch):
        """An empty DISCORD_BOT_TOKEN normalizes to None."""
        from config.settings import Settings

        monkeypatch.setenv("DISCORD_BOT_TOKEN", "")
        cfg = Settings()
        assert cfg.discord_bot_token is None

    def test_allowed_discord_channels_from_env(self, monkeypatch):
        """ALLOWED_DISCORD_CHANNELS is stored as the raw comma string."""
        from config.settings import Settings

        monkeypatch.setenv("ALLOWED_DISCORD_CHANNELS", "111,222,333")
        cfg = Settings()
        assert cfg.allowed_discord_channels == "111,222,333"

    def test_messaging_platform_from_env(self, monkeypatch):
        """MESSAGING_PLATFORM env var flows into settings."""
        from config.settings import Settings

        monkeypatch.setenv("MESSAGING_PLATFORM", "discord")
        cfg = Settings()
        assert cfg.messaging_platform == "discord"

    def test_whisper_device_auto_rejected(self, monkeypatch):
        """WHISPER_DEVICE=auto raises ValidationError (auto was removed)."""
        from config.settings import Settings

        monkeypatch.setenv("WHISPER_DEVICE", "auto")
        with pytest.raises(ValidationError, match="whisper_device"):
            Settings()

    @pytest.mark.parametrize("device", ["cpu", "cuda"])
    def test_whisper_device_valid(self, monkeypatch, device):
        """cpu and cuda are both accepted whisper_device values."""
        from config.settings import Settings

        monkeypatch.setenv("WHISPER_DEVICE", device)
        cfg = Settings()
        assert cfg.whisper_device == device
|
|
|
class TestPerTierModelMapping:
    """Cover the per-tier model override fields and resolve_model()."""

    def test_tier_fields_default_none(self):
        """All per-tier model overrides default to None."""
        from config.settings import Settings

        cfg = Settings()
        assert cfg.model_opus is None
        assert cfg.model_sonnet is None
        assert cfg.model_haiku is None

    def test_model_opus_from_env(self, monkeypatch):
        """MODEL_OPUS env var flows into settings."""
        from config.settings import Settings

        monkeypatch.setenv("MODEL_OPUS", "open_router/deepseek/deepseek-r1")
        cfg = Settings()
        assert cfg.model_opus == "open_router/deepseek/deepseek-r1"

    def test_model_sonnet_from_env(self, monkeypatch):
        """MODEL_SONNET env var flows into settings."""
        from config.settings import Settings

        monkeypatch.setenv("MODEL_SONNET", "nvidia_nim/meta/llama-3.3-70b-instruct")
        cfg = Settings()
        assert cfg.model_sonnet == "nvidia_nim/meta/llama-3.3-70b-instruct"

    def test_model_haiku_from_env(self, monkeypatch):
        """MODEL_HAIKU env var flows into settings."""
        from config.settings import Settings

        monkeypatch.setenv("MODEL_HAIKU", "lmstudio/qwen2.5-7b")
        cfg = Settings()
        assert cfg.model_haiku == "lmstudio/qwen2.5-7b"

    def test_model_opus_invalid_provider_raises(self, monkeypatch):
        """An unknown provider prefix in MODEL_OPUS is rejected."""
        from config.settings import Settings

        monkeypatch.setenv("MODEL_OPUS", "bad_provider/some-model")
        with pytest.raises(ValidationError, match="Invalid provider"):
            Settings()

    def test_model_opus_no_slash_raises(self, monkeypatch):
        """MODEL_OPUS lacking a provider prefix is rejected."""
        from config.settings import Settings

        monkeypatch.setenv("MODEL_OPUS", "noprefix")
        with pytest.raises(ValidationError, match="provider type"):
            Settings()

    def test_model_haiku_invalid_provider_raises(self, monkeypatch):
        """An unknown provider prefix in MODEL_HAIKU is rejected."""
        from config.settings import Settings

        monkeypatch.setenv("MODEL_HAIKU", "invalid/model")
        with pytest.raises(ValidationError, match="Invalid provider"):
            Settings()

    def test_resolve_model_opus_override(self):
        """Every opus-family model name resolves to model_opus."""
        from config.settings import Settings

        cfg = Settings()
        cfg.model_opus = "open_router/deepseek/deepseek-r1"
        opus_names = (
            "claude-opus-4-20250514",
            "claude-3-opus",
            "claude-3-opus-20240229",
        )
        for name in opus_names:
            assert cfg.resolve_model(name) == "open_router/deepseek/deepseek-r1"

    def test_resolve_model_sonnet_override(self):
        """Every sonnet-family model name resolves to model_sonnet."""
        from config.settings import Settings

        cfg = Settings()
        cfg.model_sonnet = "nvidia_nim/meta/llama-3.3-70b-instruct"
        sonnet_names = ("claude-sonnet-4-20250514", "claude-3-5-sonnet-20241022")
        for name in sonnet_names:
            assert cfg.resolve_model(name) == "nvidia_nim/meta/llama-3.3-70b-instruct"

    def test_resolve_model_haiku_override(self):
        """Every haiku-family model name resolves to model_haiku."""
        from config.settings import Settings

        cfg = Settings()
        cfg.model_haiku = "lmstudio/qwen2.5-7b"
        haiku_names = (
            "claude-3-haiku-20240307",
            "claude-3-5-haiku-20241022",
            "claude-haiku-4-20250514",
        )
        for name in haiku_names:
            assert cfg.resolve_model(name) == "lmstudio/qwen2.5-7b"

    def test_resolve_model_fallback_when_tier_not_set(self):
        """With no tier overrides, every tier falls back to the base model."""
        from config.settings import Settings

        cfg = Settings()
        cfg.model = "nvidia_nim/fallback-model"
        # No tier overrides set.
        tier_names = (
            "claude-opus-4-20250514",
            "claude-sonnet-4-20250514",
            "claude-3-haiku-20240307",
        )
        for name in tier_names:
            assert cfg.resolve_model(name) == "nvidia_nim/fallback-model"

    def test_resolve_model_unknown_tier_falls_back(self):
        """Names outside all tiers resolve to the base model."""
        from config.settings import Settings

        cfg = Settings()
        cfg.model = "nvidia_nim/fallback-model"
        cfg.model_opus = "open_router/opus-model"
        for name in ("claude-2.1", "some-unknown-model"):
            assert cfg.resolve_model(name) == "nvidia_nim/fallback-model"

    def test_resolve_model_case_insensitive(self):
        """Tier classification ignores letter case."""
        from config.settings import Settings

        cfg = Settings()
        cfg.model_opus = "open_router/opus-model"
        assert cfg.resolve_model("Claude-OPUS-4") == "open_router/opus-model"

    def test_parse_provider_type(self):
        """parse_provider_type returns the segment before the first slash."""
        from config.settings import Settings

        assert Settings.parse_provider_type("nvidia_nim/meta/llama") == "nvidia_nim"
        assert Settings.parse_provider_type("open_router/deepseek/r1") == "open_router"
        assert Settings.parse_provider_type("lmstudio/qwen") == "lmstudio"

    def test_parse_model_name(self):
        """parse_model_name returns everything after the first slash."""
        from config.settings import Settings

        assert Settings.parse_model_name("nvidia_nim/meta/llama") == "meta/llama"
        assert Settings.parse_model_name("lmstudio/qwen") == "qwen"