feat: add native LLaMA.cpp local provider support (#1346)

Co-authored-by: bytecii <bytecii@users.noreply.github.com>
Co-authored-by: bytecii <994513625@qq.com>
This commit is contained in:
it-education-md 2026-03-02 19:45:02 -05:00 committed by GitHub
parent 478926d33f
commit d606fae458
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
7 changed files with 499 additions and 262 deletions

View file

@@ -20,6 +20,7 @@ PLATFORM_ALIAS_MAPPING: Final[dict[str, str]] = {
"z.ai": "openai-compatible-model",
"ModelArk": "openai-compatible-model",
"grok": "openai-compatible-model",
"llama.cpp": "openai-compatible-model",
}

View file

@@ -26,6 +26,7 @@ def test_normalize_model_platform_maps_known_aliases():
assert normalize_model_platform("grok") == "openai-compatible-model"
assert normalize_model_platform("z.ai") == "openai-compatible-model"
assert normalize_model_platform("ModelArk") == "openai-compatible-model"
assert normalize_model_platform("llama.cpp") == "openai-compatible-model"
def test_normalize_model_platform_keeps_non_alias_unchanged():
@@ -43,7 +44,7 @@ def test_normalized_model_platform_type_applies_in_pydantic_model():
optional_model_platform: NormalizedOptionalModelPlatform = None
item = _Model(
model_platform="grok",
model_platform="llama.cpp",
optional_model_platform="ModelArk",
)