fix(backend): default max_tokens for Anthropic model validation (#1549)
Some checks are pending
CodeQL Advanced / Analyze (python) (push) Waiting to run
CodeQL Advanced / Analyze (actions) (push) Waiting to run
CodeQL Advanced / Analyze (javascript-typescript) (push) Waiting to run
Pre-commit / pre-commit (push) Waiting to run
Test / Run Python Tests (push) Waiting to run

Co-authored-by: Cursor Agent <cursoragent@cursor.com>
Co-authored-by: emag165 <emag165@users.noreply.github.com>
Co-authored-by: Sun Tao <2605127667@qq.com>
This commit is contained in:
emag165 2026-04-11 22:02:21 +08:00 committed by GitHub
parent 53ef106b3e
commit eacda32c08
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
2 changed files with 13 additions and 5 deletions

View file

@@ -136,11 +136,11 @@ def agent_model(
)
model_platform_enum = None
if (
effective_config["model_platform"].lower() == "anthropic"
and model_config.get("cache_control") is None
):
model_config["cache_control"] = "5m"
if effective_config["model_platform"].lower() == "anthropic":
if model_config.get("cache_control") is None:
model_config["cache_control"] = "5m"
if model_config.get("max_tokens") is None:
model_config["max_tokens"] = 64000
model = ModelFactory.create(
model_platform=effective_config["model_platform"],

View file

@@ -227,6 +227,10 @@ def create_agent(
raise ValueError(f"Invalid model_type: {model_type}")
if platform is None:
raise ValueError(f"Invalid model_platform: {model_platform}")
if str(platform).lower() == "anthropic":
model_config_dict = dict(model_config_dict or {})
if model_config_dict.get("max_tokens") is None:
model_config_dict["max_tokens"] = 4096
model = ModelFactory.create(
model_platform=platform,
model_type=mtype,
@@ -326,6 +330,10 @@ def validate_model_with_details(
"Creating model",
extra={"platform": model_platform, "model_type": model_type},
)
if str(model_platform).lower() == "anthropic":
model_config_dict = dict(model_config_dict or {})
if model_config_dict.get("max_tokens") is None:
model_config_dict["max_tokens"] = 4096
model = ModelFactory.create(
model_platform=model_platform,
model_type=model_type,