Commit 8793a65 (1 parent: 707daf9)

Re-enable repo consistency checks
5 files changed: +9 -6 lines

.github/workflows/tests_torch.yml (+2 -1)

@@ -37,9 +37,10 @@ jobs:
         run: |
           pip install torch==1.12.1
           pip install .[quality]
-      - name: Check Quality
+      - name: Check Quality and Repo Consistency
         run: |
           make quality
+          make repo-consistency
   run_reduced_tests_torch:
     timeout-minutes: 60
     runs-on: ubuntu-latest
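
Note: with this change, the quality job fails if either target fails. A minimal sketch of reproducing the CI step locally, assuming GNU make and the repository's Makefile are on hand (illustrative only, not part of the repo):

    import subprocess

    # Run the same two targets the CI step invokes; check=True aborts on the
    # first non-zero exit code, mirroring a failing CI step.
    for target in ("quality", "repo-consistency"):
        subprocess.run(["make", target], check=True)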

Makefile (+1 -1)

@@ -37,11 +37,11 @@ autogenerate_code: deps_table_update
 	# python utils/check_copies.py
 	# python utils/check_table.py
 	# python utils/check_dummies.py
+	# python utils/tests_fetcher.py --sanity_check
 repo-consistency:
 	python utils/check_repo.py
 	python utils/check_inits.py
 	python utils/check_config_docstrings.py
-	python utils/tests_fetcher.py --sanity_check
 	python utils/check_adapters.py
 
 # this target runs checks on all files
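
Note: the tests_fetcher sanity check moves from the active repo-consistency target into the commented-out block, so `make repo-consistency` now chains only the four remaining scripts. A hedged Python sketch of the same sequence, with the script list taken from the diff:

    import subprocess
    import sys

    # Run the consistency scripts the updated target keeps, in order;
    # any failing script stops the run, just as make would.
    for script in (
        "utils/check_repo.py",
        "utils/check_inits.py",
        "utils/check_config_docstrings.py",
        "utils/check_adapters.py",
    ):
        subprocess.run([sys.executable, script], check=True)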

src/transformers/models/beit/modeling_beit.py (+2 -1)

@@ -29,6 +29,7 @@
 from ...adapters.context import ForwardContext
 from ...adapters.lora import Linear as LoRALinear
 from ...adapters.mixins.beit import BeitLayerAdaptersMixin, BeitModelAdaptersMixin, BeitModelWithHeadsAdaptersMixin
+from ...adapters.model_mixin import ModelWithHeadsAdaptersMixin
 from ...adapters.prefix_tuning import PrefixTuningShim
 from ...modeling_outputs import (
     BaseModelOutput,
@@ -1176,7 +1177,7 @@ def forward(self, encoder_hidden_states: torch.Tensor) -> torch.Tensor:
     """,
     BEIT_START_DOCSTRING,
 )
-class BeitForSemanticSegmentation(BeitPreTrainedModel):
+class BeitForSemanticSegmentation(ModelWithHeadsAdaptersMixin, BeitPreTrainedModel):
     def __init__(self, config: BeitConfig) -> None:
         super().__init__(config)
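
Note: listing ModelWithHeadsAdaptersMixin before BeitPreTrainedModel matters, because Python's method resolution order searches bases left to right, so the mixin's overrides take precedence. A self-contained sketch with hypothetical classes (not the actual transformers code):

    class Base:
        def save_pretrained(self):
            return "base save"

    class HeadsMixin:
        def save_pretrained(self):
            # The mixin can extend the base behaviour via super().
            return "mixin -> " + super().save_pretrained()

    class Model(HeadsMixin, Base):  # mixin first, as in the diff
        pass

    print(Model().save_pretrained())            # mixin -> base save
    print([c.__name__ for c in Model.__mro__])  # ['Model', 'HeadsMixin', 'Base', 'object']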

utils/check_adapters.py (+1 -1)

@@ -16,7 +16,7 @@
     "encoder_decoder",
     "t5",
     "deberta",
-    "deberta-v2",
+    "deberta_v2",
     "vit",
 ]
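
Note: the underscore spelling matches the model's subpackage, transformers/models/deberta_v2. Hyphens cannot appear in Python module names, so a hyphenated entry could never resolve to an importable module. A quick standalone check:

    # "deberta_v2" is a valid Python identifier (and thus a possible module
    # name); "deberta-v2" is not.
    print("deberta_v2".isidentifier())   # True
    print("deberta-v2".isidentifier())   # False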

utils/check_inits.py (+3 -2)

@@ -61,7 +61,7 @@ def parse_init(init_file):
     Read an init_file and parse (per backend) the _import_structure objects defined and the TYPE_CHECKING objects
     defined
     """
-    with open(init_file, "r", encoding="utf-8", newline="\n") as f:
+    with open(init_file, "r", encoding="utf-8") as f:
         lines = f.readlines()
 
     line_index = 0
@@ -296,4 +296,5 @@ def check_submodules():
 
 if __name__ == "__main__":
     check_all_inits()
-    check_submodules()
+    # For AH: adapter submodules are not all registered in the main init of Transformers.
+    # check_submodules()
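
Note: dropping newline="\n" restores Python's universal-newline handling when reading. With newline="\n", a file saved with Windows line endings keeps its carriage returns in every line returned by readlines(), which can trip exact string comparisons in the parser. A standalone sketch of the difference, using a temporary file:

    import os
    import tempfile

    with tempfile.NamedTemporaryFile(delete=False) as tmp:
        tmp.write(b"line one\r\nline two\r\n")  # Windows-style endings

    with open(tmp.name, "r", encoding="utf-8", newline="\n") as f:
        print(f.readlines())  # ['line one\r\n', 'line two\r\n'] -- '\r' kept

    with open(tmp.name, "r", encoding="utf-8") as f:
        print(f.readlines())  # ['line one\n', 'line two\n'] -- translated

    os.remove(tmp.name)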
