
Commit 0382898: remove on vlms

gante committed Sep 20, 2024
1 parent 8bfe7c8
Showing 9 changed files with 0 additions and 9 deletions.
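The nine hunks below all delete the same `all_generative_model_classes` attribute from vision-language (and OLMoE) model test classes. The commit message does not state the rationale, but one plausible reading is that the generation test mixin can infer the generative classes directly from `all_model_classes`, which would make the explicit attribute redundant. The sketch below only illustrates that assumption; `GenerationTesterMixinSketch` is a made-up name, not the real `GenerationTesterMixin` implementation.

# Hypothetical sketch, not the actual transformers code: derive generative
# classes from `all_model_classes` instead of listing them separately.
class GenerationTesterMixinSketch:
    all_model_classes = ()  # set by each concrete model test class

    @property
    def all_generative_model_classes(self):
        # Keep only model classes that report they can generate.
        return tuple(
            cls
            for cls in self.all_model_classes
            if getattr(cls, "can_generate", None) and cls.can_generate()
        )

With an inference like this in place, each test class would only need to set `all_model_classes`, which matches what the diffs below leave behind.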
tests/models/llava/test_modeling_llava.py (1 change: 0 additions & 1 deletion)

@@ -182,7 +182,6 @@ class LlavaForConditionalGenerationModelTest(ModelTesterMixin, GenerationTesterMixin
     """

     all_model_classes = (LlavaForConditionalGeneration,) if is_torch_available() else ()
-    all_generative_model_classes = (LlavaForConditionalGeneration,) if is_torch_available() else ()
     pipeline_model_mapping = {"image-to-text": LlavaForConditionalGeneration} if is_torch_available() else {}
     test_pruning = False
     test_head_masking = False
tests/models/llava_next/test_modeling_llava_next.py (1 change: 0 additions & 1 deletion)

@@ -215,7 +215,6 @@ class LlavaNextForConditionalGenerationModelTest(ModelTesterMixin, GenerationTesterMixin
     """

     all_model_classes = (LlavaNextForConditionalGeneration,) if is_torch_available() else ()
-    all_generative_model_classes = (LlavaNextForConditionalGeneration,) if is_torch_available() else ()
     test_pruning = False
     test_head_masking = False

tests/models/llava_next_video/test_modeling_llava_next_video.py (1 change: 0 additions & 1 deletion)

@@ -233,7 +233,6 @@ class LlavaNextVideoForConditionalGenerationModelTest(ModelTesterMixin, GenerationTesterMixin
     """

     all_model_classes = (LlavaNextVideoForConditionalGeneration,) if is_torch_available() else ()
-    all_generative_model_classes = (LlavaNextVideoForConditionalGeneration,) if is_torch_available() else ()
     test_pruning = False
     test_head_masking = False

tests/models/llava_onevision/test_modeling_llava_onevision.py (1 change: 0 additions & 1 deletion)

@@ -216,7 +216,6 @@ class LlavaOnevisionForConditionalGenerationModelTest(ModelTesterMixin, GenerationTesterMixin
     """

     all_model_classes = (LlavaOnevisionForConditionalGeneration,) if is_torch_available() else ()
-    all_generative_model_classes = (LlavaOnevisionForConditionalGeneration,) if is_torch_available() else ()
     test_pruning = False
     test_head_masking = False

tests/models/olmoe/test_modeling_olmoe.py (1 change: 0 additions & 1 deletion)

@@ -289,7 +289,6 @@ def prepare_config_and_inputs_for_common(self):
 @require_torch
 class OlmoeModelTest(ModelTesterMixin, GenerationTesterMixin, PipelineTesterMixin, unittest.TestCase):
     all_model_classes = (OlmoeModel, OlmoeForCausalLM) if is_torch_available() else ()
-    all_generative_model_classes = (OlmoeForCausalLM,) if is_torch_available() else ()
     pipeline_model_mapping = (
         {
             "feature-extraction": OlmoeModel,
tests/models/paligemma/test_modeling_paligemma.py (1 change: 0 additions & 1 deletion)

@@ -181,7 +181,6 @@ class PaliGemmaForConditionalGenerationModelTest(ModelTesterMixin, GenerationTesterMixin
     """

     all_model_classes = (PaliGemmaForConditionalGeneration,) if is_torch_available() else ()
-    all_generative_model_classes = (PaliGemmaForConditionalGeneration,) if is_torch_available() else ()
     fx_compatible = False
     test_pruning = False
     test_torchscript = False
tests/models/qwen2_vl/test_modeling_qwen2_vl.py (1 change: 0 additions & 1 deletion)

@@ -223,7 +223,6 @@ class Qwen2VLModelTest(ModelTesterMixin, GenerationTesterMixin, unittest.TestCase):
     """

     all_model_classes = (Qwen2VLForConditionalGeneration,) if is_torch_available() else ()
-    all_generative_model_classes = (Qwen2VLForConditionalGeneration,) if is_torch_available() else ()
     test_pruning = False
     test_head_masking = False

tests/models/video_llava/test_modeling_video_llava.py (1 change: 0 additions & 1 deletion)

@@ -201,7 +201,6 @@ class VideoLlavaForConditionalGenerationModelTest(ModelTesterMixin, GenerationTesterMixin
     """

     all_model_classes = (VideoLlavaForConditionalGeneration,) if is_torch_available() else ()
-    all_generative_model_classes = (VideoLlavaForConditionalGeneration,) if is_torch_available() else ()
     fx_compatible = False
     test_pruning = False
     test_resize_embeddings = True
tests/models/vipllava/test_modeling_vipllava.py (1 change: 0 additions & 1 deletion)

@@ -163,7 +163,6 @@ class VipLlavaForConditionalGenerationModelTest(ModelTesterMixin, GenerationTesterMixin
     """

     all_model_classes = (VipLlavaForConditionalGeneration,) if is_torch_available() else ()
-    all_generative_model_classes = (VipLlavaForConditionalGeneration,) if is_torch_available() else ()
     fx_compatible = False
     test_pruning = False
     test_resize_embeddings = True
