Skip to content

Commit

Permalink
Rename test_model_common_attributes -> test_model_get_set_embeddings (huggingface#31321)
Browse files Browse the repository at this point in the history

* Rename to test_model_get_set_embeddings
The method name is misleading - it is testing being able to get and set embeddings, not common attributes to all models

* Explicitly skip
  • Loading branch information
amyeroberts authored Jun 7, 2024
1 parent c1be42f commit 25245ec
Show file tree
Hide file tree
Showing 108 changed files with 167 additions and 145 deletions.
2 changes: 1 addition & 1 deletion conftest.py
Original file line number Diff line number Diff line change
Expand Up @@ -53,7 +53,7 @@
"test_torch_save_load",
"test_initialization",
"test_forward_signature",
"test_model_common_attributes",
"test_model_get_set_embeddings",
"test_model_main_input_name",
"test_correct_missing_keys",
"test_tie_model_weights",
Expand Down
4 changes: 2 additions & 2 deletions tests/models/align/test_modeling_align.py
Original file line number Diff line number Diff line change
Expand Up @@ -171,7 +171,7 @@ def test_inputs_embeds_matches_input_ids(self):
pass

@unittest.skip(reason="AlignVisionModel does not support input and output embeddings")
def test_model_common_attributes(self):
def test_model_get_set_embeddings(self):
pass

def test_forward_signature(self):
Expand Down Expand Up @@ -489,7 +489,7 @@ def test_retain_grad_hidden_states_attentions(self):
pass

@unittest.skip(reason="AlignModel does not have input/output embeddings")
def test_model_common_attributes(self):
def test_model_get_set_embeddings(self):
pass

# override as the `temperature` parameter initilization is different for ALIGN
Expand Down
4 changes: 2 additions & 2 deletions tests/models/altclip/test_modeling_altclip.py
Original file line number Diff line number Diff line change
Expand Up @@ -153,7 +153,7 @@ def test_config(self):
def test_inputs_embeds(self):
pass

def test_model_common_attributes(self):
def test_model_get_set_embeddings(self):
config, _ = self.model_tester.prepare_config_and_inputs_for_common()

for model_class in self.all_model_classes:
Expand Down Expand Up @@ -459,7 +459,7 @@ def test_retain_grad_hidden_states_attentions(self):
pass

@unittest.skip(reason="CLIPModel does not have input/output embeddings")
def test_model_common_attributes(self):
def test_model_get_set_embeddings(self):
pass

# override as the `logit_scale` parameter initilization is different for AltCLIP
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -185,7 +185,7 @@ def test_config(self):
def test_inputs_embeds(self):
pass

def test_model_common_attributes(self):
def test_model_get_set_embeddings(self):
config, _ = self.model_tester.prepare_config_and_inputs_for_common()

for model_class in self.all_model_classes:
Expand Down
5 changes: 4 additions & 1 deletion tests/models/autoformer/test_modeling_autoformer.py
Original file line number Diff line number Diff line change
Expand Up @@ -212,7 +212,6 @@ class AutoformerModelTest(ModelTesterMixin, PipelineTesterMixin, unittest.TestCa
test_missing_keys = False
test_torchscript = False
test_inputs_embeds = False
test_model_common_attributes = False

def setUp(self):
self.model_tester = AutoformerModelTester(self)
Expand Down Expand Up @@ -403,6 +402,10 @@ def test_attention_outputs(self):
def test_retain_grad_hidden_states_attentions(self):
super().test_retain_grad_hidden_states_attentions()

@unittest.skip(reason="Model does not have input embeddings")
def test_model_get_set_embeddings(self):
pass


def prepare_batch(filename="train-batch.pt"):
file = hf_hub_download(repo_id="hf-internal-testing/tourism-monthly-batch", filename=filename, repo_type="dataset")
Expand Down
2 changes: 1 addition & 1 deletion tests/models/bark/test_modeling_bark.py
Original file line number Diff line number Diff line change
Expand Up @@ -809,7 +809,7 @@ def test_forward_signature(self):
expected_arg_names = ["codebook_idx", "input_ids"]
self.assertListEqual(arg_names[:2], expected_arg_names)

def test_model_common_attributes(self):
def test_model_get_set_embeddings(self):
# one embedding layer per codebook
config, inputs_dict = self.model_tester.prepare_config_and_inputs_for_common()

Expand Down
2 changes: 1 addition & 1 deletion tests/models/beit/test_modeling_beit.py
Original file line number Diff line number Diff line change
Expand Up @@ -270,7 +270,7 @@ def test_multi_gpu_data_parallel_forward(self):
def test_feed_forward_chunking(self):
pass

def test_model_common_attributes(self):
def test_model_get_set_embeddings(self):
config, _ = self.model_tester.prepare_config_and_inputs_for_common()

for model_class in self.all_model_classes:
Expand Down
2 changes: 1 addition & 1 deletion tests/models/bit/test_modeling_bit.py
Original file line number Diff line number Diff line change
Expand Up @@ -196,7 +196,7 @@ def test_inputs_embeds(self):
pass

@unittest.skip(reason="Bit does not support input and output embeddings")
def test_model_common_attributes(self):
def test_model_get_set_embeddings(self):
pass

def test_model(self):
Expand Down
10 changes: 5 additions & 5 deletions tests/models/blip/test_modeling_blip.py
Original file line number Diff line number Diff line change
Expand Up @@ -168,7 +168,7 @@ def test_config(self):
def test_inputs_embeds(self):
pass

def test_model_common_attributes(self):
def test_model_get_set_embeddings(self):
config, _ = self.model_tester.prepare_config_and_inputs_for_common()

for model_class in self.all_model_classes:
Expand Down Expand Up @@ -462,7 +462,7 @@ def test_retain_grad_hidden_states_attentions(self):
pass

@unittest.skip(reason="BlipModel does not have input/output embeddings")
def test_model_common_attributes(self):
def test_model_get_set_embeddings(self):
pass

# override as the `logit_scale` parameter initilization is different for Blip
Expand Down Expand Up @@ -867,7 +867,7 @@ def test_inputs_embeds(self):
pass

@unittest.skip(reason="BlipModel does not have input/output embeddings")
def test_model_common_attributes(self):
def test_model_get_set_embeddings(self):
pass


Expand Down Expand Up @@ -901,7 +901,7 @@ def test_retain_grad_hidden_states_attentions(self):
pass

@unittest.skip(reason="BlipModel does not have input/output embeddings")
def test_model_common_attributes(self):
def test_model_get_set_embeddings(self):
pass

def test_forward_signature(self):
Expand Down Expand Up @@ -1129,7 +1129,7 @@ def test_retain_grad_hidden_states_attentions(self):
pass

@unittest.skip(reason="BlipModel does not have input/output embeddings")
def test_model_common_attributes(self):
def test_model_get_set_embeddings(self):
pass

def test_forward_signature(self):
Expand Down
6 changes: 3 additions & 3 deletions tests/models/blip_2/test_modeling_blip_2.py
Original file line number Diff line number Diff line change
Expand Up @@ -162,7 +162,7 @@ def test_config(self):
def test_inputs_embeds(self):
pass

def test_model_common_attributes(self):
def test_model_get_set_embeddings(self):
config, _ = self.model_tester.prepare_config_and_inputs_for_common()

for model_class in self.all_model_classes:
Expand Down Expand Up @@ -463,7 +463,7 @@ def test_retain_grad_hidden_states_attentions(self):
pass

@unittest.skip(reason="Blip2Model does not have input/output embeddings")
def test_model_common_attributes(self):
def test_model_get_set_embeddings(self):
pass

@unittest.skip(reason="There's no base Blip2Model")
Expand Down Expand Up @@ -722,7 +722,7 @@ def test_retain_grad_hidden_states_attentions(self):
pass

@unittest.skip(reason="Blip2Model does not have input/output embeddings")
def test_model_common_attributes(self):
def test_model_get_set_embeddings(self):
pass

@unittest.skip(reason="There's no base Blip2Model")
Expand Down
2 changes: 1 addition & 1 deletion tests/models/bridgetower/test_modeling_bridgetower.py
Original file line number Diff line number Diff line change
Expand Up @@ -499,7 +499,7 @@ def test_initialization(self):
)

@unittest.skip(reason="""Bridge Tower does not have input/output embeddings. So this test is not applicable.""")
def test_model_common_attributes(self):
def test_model_get_set_embeddings(self):
pass

@unittest.skip(reason="""Bridge Tower does not have input/output embeddings. Thus this test is not applicable.""")
Expand Down
2 changes: 1 addition & 1 deletion tests/models/canine/test_modeling_canine.py
Original file line number Diff line number Diff line change
Expand Up @@ -506,7 +506,7 @@ def test_inputs_embeds_matches_input_ids(self):
pass

@unittest.skip("CANINE does not have a get_input_embeddings() method.")
def test_model_common_attributes(self):
def test_model_get_set_embeddings(self):
pass

@unittest.skip(
Expand Down
4 changes: 2 additions & 2 deletions tests/models/chinese_clip/test_modeling_chinese_clip.py
Original file line number Diff line number Diff line change
Expand Up @@ -441,7 +441,7 @@ def test_config(self):
def test_inputs_embeds(self):
pass

def test_model_common_attributes(self):
def test_model_get_set_embeddings(self):
config, _ = self.model_tester.prepare_config_and_inputs_for_common()

for model_class in self.all_model_classes:
Expand Down Expand Up @@ -589,7 +589,7 @@ def test_retain_grad_hidden_states_attentions(self):
pass

@unittest.skip(reason="ChineseCLIPModel does not have input/output embeddings")
def test_model_common_attributes(self):
def test_model_get_set_embeddings(self):
pass

# override as the `logit_scale` parameter initilization is different for CHINESE_CLIP
Expand Down
4 changes: 2 additions & 2 deletions tests/models/clap/test_modeling_clap.py
Original file line number Diff line number Diff line change
Expand Up @@ -177,7 +177,7 @@ def test_config(self):
def test_inputs_embeds(self):
pass

def test_model_common_attributes(self):
def test_model_get_set_embeddings(self):
config, _ = self.model_tester.prepare_config_and_inputs_for_common()

for model_class in self.all_model_classes:
Expand Down Expand Up @@ -533,7 +533,7 @@ def test_retain_grad_hidden_states_attentions(self):
pass

@unittest.skip(reason="ClapModel does not have input/output embeddings")
def test_model_common_attributes(self):
def test_model_get_set_embeddings(self):
pass

# override as the `logit_scale` parameter initilization is different for CLAP
Expand Down
6 changes: 3 additions & 3 deletions tests/models/clip/test_modeling_clip.py
Original file line number Diff line number Diff line change
Expand Up @@ -191,7 +191,7 @@ def test_config(self):
def test_inputs_embeds(self):
pass

def test_model_common_attributes(self):
def test_model_get_set_embeddings(self):
config, _ = self.model_tester.prepare_config_and_inputs_for_common()

for model_class in self.all_model_classes:
Expand Down Expand Up @@ -506,7 +506,7 @@ def test_retain_grad_hidden_states_attentions(self):
pass

@unittest.skip(reason="CLIPModel does not have input/output embeddings")
def test_model_common_attributes(self):
def test_model_get_set_embeddings(self):
pass

# override as the `logit_scale` parameter initilization is different for CLIP
Expand Down Expand Up @@ -783,7 +783,7 @@ def test_inputs_embeds(self):
pass

@unittest.skip(reason="CLIPForImageClassification does not support inputs_embeds")
def test_model_common_attributes(self):
def test_model_get_set_embeddings(self):
pass

@unittest.skip(reason="CLIPForImageClassification does not support gradient checkpointing yet")
Expand Down
4 changes: 2 additions & 2 deletions tests/models/clipseg/test_modeling_clipseg.py
Original file line number Diff line number Diff line change
Expand Up @@ -169,7 +169,7 @@ def test_config(self):
def test_inputs_embeds(self):
pass

def test_model_common_attributes(self):
def test_model_get_set_embeddings(self):
config, _ = self.model_tester.prepare_config_and_inputs_for_common()

for model_class in self.all_model_classes:
Expand Down Expand Up @@ -490,7 +490,7 @@ def test_retain_grad_hidden_states_attentions(self):
pass

@unittest.skip(reason="CLIPSegModel does not have input/output embeddings")
def test_model_common_attributes(self):
def test_model_get_set_embeddings(self):
pass

@unittest.skip(
Expand Down
2 changes: 1 addition & 1 deletion tests/models/clvp/test_modeling_clvp.py
Original file line number Diff line number Diff line change
Expand Up @@ -489,7 +489,7 @@ def test_inputs_embeds(self):
pass

@unittest.skip(reason="ClvpModelForConditionalGeneration does not have get_input_embeddings")
def test_model_common_attributes(self):
def test_model_get_set_embeddings(self):
pass

# override as the `logit_scale` parameter initilization is different for Clvp
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -251,7 +251,7 @@ def test_inputs_embeds_matches_input_ids(self):
pass

@unittest.skip(reason="Conditional DETR does not have a get_input_embeddings method")
def test_model_common_attributes(self):
def test_model_get_set_embeddings(self):
pass

@unittest.skip(reason="Conditional DETR is not a generative model")
Expand Down
2 changes: 1 addition & 1 deletion tests/models/convnext/test_modeling_convnext.py
Original file line number Diff line number Diff line change
Expand Up @@ -202,7 +202,7 @@ def test_inputs_embeds(self):
pass

@unittest.skip(reason="ConvNext does not support input and output embeddings")
def test_model_common_attributes(self):
def test_model_get_set_embeddings(self):
pass

@unittest.skip(reason="ConvNext does not use feedforward chunking")
Expand Down
2 changes: 1 addition & 1 deletion tests/models/convnextv2/test_modeling_convnextv2.py
Original file line number Diff line number Diff line change
Expand Up @@ -210,7 +210,7 @@ def test_inputs_embeds(self):
pass

@unittest.skip(reason="ConvNextV2 does not support input and output embeddings")
def test_model_common_attributes(self):
def test_model_get_set_embeddings(self):
pass

@unittest.skip(reason="ConvNextV2 does not use feedforward chunking")
Expand Down
2 changes: 1 addition & 1 deletion tests/models/cvt/test_modeling_cvt.py
Original file line number Diff line number Diff line change
Expand Up @@ -185,7 +185,7 @@ def test_inputs_embeds(self):
pass

@unittest.skip(reason="Cvt does not support input and output embeddings")
def test_model_common_attributes(self):
def test_model_get_set_embeddings(self):
pass

def test_model(self):
Expand Down
2 changes: 1 addition & 1 deletion tests/models/data2vec/test_modeling_data2vec_audio.py
Original file line number Diff line number Diff line change
Expand Up @@ -442,7 +442,7 @@ def test_resize_tokens_embeddings(self):
# Data2VecAudio has no inputs_embeds
# and thus the `get_input_embeddings` fn
# is not implemented
def test_model_common_attributes(self):
def test_model_get_set_embeddings(self):
pass

@is_pt_flax_cross_test
Expand Down
2 changes: 1 addition & 1 deletion tests/models/data2vec/test_modeling_data2vec_vision.py
Original file line number Diff line number Diff line change
Expand Up @@ -207,7 +207,7 @@ def test_inputs_embeds(self):
def test_multi_gpu_data_parallel_forward(self):
pass

def test_model_common_attributes(self):
def test_model_get_set_embeddings(self):
config, _ = self.model_tester.prepare_config_and_inputs_for_common()

for model_class in self.all_model_classes:
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -143,7 +143,6 @@ class DecisionTransformerModelTest(ModelTesterMixin, GenerationTesterMixin, Pipe
test_attention_outputs = False
test_hidden_states_output = False
test_inputs_embeds = False
test_model_common_attributes = False
test_gradient_checkpointing = False
test_torchscript = False

Expand Down Expand Up @@ -184,6 +183,10 @@ def test_forward_signature(self):

self.assertListEqual(arg_names[: len(expected_arg_names)], expected_arg_names)

@unittest.skip(reason="Model does not have input embeddings")
def test_model_get_set_embeddings(self):
pass


@require_torch
class DecisionTransformerModelIntegrationTest(unittest.TestCase):
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -257,7 +257,7 @@ def test_inputs_embeds_matches_input_ids(self):
pass

@unittest.skip(reason="Deformable DETR does not have a get_input_embeddings method")
def test_model_common_attributes(self):
def test_model_get_set_embeddings(self):
pass

@unittest.skip(reason="Deformable DETR is not a generative model")
Expand Down
2 changes: 1 addition & 1 deletion tests/models/deit/test_modeling_deit.py
Original file line number Diff line number Diff line change
Expand Up @@ -241,7 +241,7 @@ def test_config(self):
def test_inputs_embeds(self):
pass

def test_model_common_attributes(self):
def test_model_get_set_embeddings(self):
config, _ = self.model_tester.prepare_config_and_inputs_for_common()

for model_class in self.all_model_classes:
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -178,7 +178,7 @@ def test_training_gradient_checkpointing(self):
pass

@unittest.skip(reason="Depth Anything with AutoBackbone does not have a base model and hence no input_embeddings")
def test_model_common_attributes(self):
def test_model_get_set_embeddings(self):
pass

@unittest.skip(reason="Depth Anything with AutoBackbone does not have a base model")
Expand Down
2 changes: 1 addition & 1 deletion tests/models/detr/test_modeling_detr.py
Original file line number Diff line number Diff line change
Expand Up @@ -251,7 +251,7 @@ def test_inputs_embeds_matches_input_ids(self):
pass

@unittest.skip(reason="DETR does not have a get_input_embeddings method")
def test_model_common_attributes(self):
def test_model_get_set_embeddings(self):
pass

@unittest.skip(reason="DETR is not a generative model")
Expand Down
2 changes: 1 addition & 1 deletion tests/models/dinat/test_modeling_dinat.py
Original file line number Diff line number Diff line change
Expand Up @@ -253,7 +253,7 @@ def test_inputs_embeds(self):
def test_feed_forward_chunking(self):
pass

def test_model_common_attributes(self):
def test_model_get_set_embeddings(self):
config, _ = self.model_tester.prepare_config_and_inputs_for_common()

for model_class in self.all_model_classes:
Expand Down
Loading

0 comments on commit 25245ec

Please sign in to comment.