Skip to content

Commit 27e30f8

Browse files
CI hotfix: xfail test_training_with_transformers_paged (#4046)
1 parent af82b38 commit 27e30f8

File tree

2 files changed

+8
-1
lines changed

2 files changed

+8
-1
lines changed

tests/slow/test_grpo_slow.py

Lines changed: 4 additions & 0 deletions

@@ -19,8 +19,10 @@
 import numpy as np
 import pytest
 import torch
+import transformers
 from accelerate.utils.memory import release_memory
 from datasets import Dataset, Features, Image, Value, load_dataset
+from packaging.version import Version
 from parameterized import parameterized
 from transformers import (
     AutoModelForCausalLM,
@@ -171,6 +173,8 @@ def test_training_with_liger_grpo_loss_and_peft(self, model_name):
     @parameterized.expand(MODELS_TO_TEST)
     def test_training_with_transformers_paged(self, model_name):
         """Test that training works with transformers paged implementation (requires GPU)."""
+        if Version(transformers.__version__) < Version("4.56.2"):
+            pytest.xfail("Upstream bug in transformers (GH#40692). Fix merged; awaiting release >= 4.56.2")
         training_args = GRPOConfig(
             output_dir=self.tmp_dir,
             learning_rate=0.1,  # increase the learning rate to speed up the test

tests/test_online_dpo_trainer.py

Lines changed: 4 additions & 1 deletion

@@ -12,9 +12,10 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.

-
 import pytest
+import transformers
 from datasets import Dataset, features, load_dataset
+from packaging.version import Version
 from parameterized import parameterized
 from transformers import AutoModelForCausalLM, AutoModelForSequenceClassification, AutoTokenizer
 from transformers.testing_utils import require_peft, require_torch_accelerator, require_vision
@@ -421,6 +422,8 @@ def test_generation_config_setup(self):
     @require_torch_accelerator
     @parameterized.expand([("standard_prompt_only",), ("conversational_prompt_only",)])
     def test_training_with_transformers_paged(self, config_name):
+        if Version(transformers.__version__) < Version("4.56.2"):
+            pytest.xfail("Upstream bug in transformers (GH#40692). Fix merged; awaiting release >= 4.56.2")
         training_args = OnlineDPOConfig(
             output_dir=self.tmp_dir,
             per_device_train_batch_size=2,

0 commit comments

Comments
 (0)