From 8fcf682b280f58be512e83cd7c12a551ded9d0ff Mon Sep 17 00:00:00 2001
From: HimariO
Date: Thu, 3 Apr 2025 22:52:44 +0800
Subject: [PATCH] ignore transformers Qwen2_5_xxx type check

---
 examples/llava/qwen2_vl_surgery.py | 8 ++++----
 1 file changed, 4 insertions(+), 4 deletions(-)

diff --git a/examples/llava/qwen2_vl_surgery.py b/examples/llava/qwen2_vl_surgery.py
index 9d4ad8932..0a47a719f 100644
--- a/examples/llava/qwen2_vl_surgery.py
+++ b/examples/llava/qwen2_vl_surgery.py
@@ -6,11 +6,11 @@ import numpy as np
 from gguf import *
 from transformers import (
     AutoProcessor,
-    Qwen2VLForConditionalGeneration,
-    Qwen2_5_VLForConditionalGeneration,
-    Qwen2VLProcessor,
     Qwen2VLConfig,
-    Qwen2_5_VLConfig,
+    Qwen2VLProcessor,
+    Qwen2VLForConditionalGeneration,
+    Qwen2_5_VLConfig,  # type: ignore[reportAttributeAccessIssue]
+    Qwen2_5_VLForConditionalGeneration,  # type: ignore[reportAttributeAccessIssue]
 )
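
Note (commentary, not part of the patch): below is a minimal sketch of the suppression pattern the diff applies, assuming the diagnostic comes from Pyright (which defines the reportAttributeAccessIssue rule) and that the checker cannot statically resolve the Qwen2.5-VL symbols against the pinned transformers version even though they import fine at runtime. The trailing comment silences the diagnostic on that line only; runtime behavior is unchanged.

    # Hypothetical standalone example, not taken from the repository.
    from transformers import (
        AutoProcessor,
        Qwen2_5_VLConfig,                    # type: ignore[reportAttributeAccessIssue]
        Qwen2_5_VLForConditionalGeneration,  # type: ignore[reportAttributeAccessIssue]
    )

Pyright also accepts the rule-specific spelling "# pyright: ignore[reportAttributeAccessIssue]" for per-line suppression; the bare "# type: ignore" is the PEP 484 form that other checkers recognize as well.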