From 1bd56cbf4a63348a1f18ea906148ab7cb687c615 Mon Sep 17 00:00:00 2001 From: Mario Vega Date: Thu, 23 Oct 2025 01:46:10 +0000 Subject: [PATCH 01/12] refactor(tests): Refactor json_infra using `pytest_collect_file` --- .../evm_tools/statetest/__init__.py | 65 +++-- tests/json_infra/conftest.py | 245 +++++++++++++++++- .../helpers/load_blockchain_tests.py | 10 +- tests/json_infra/helpers/load_state_tests.py | 22 +- tests/json_infra/test_blockchain_tests.py | 40 --- tests/json_infra/test_state_tests.py | 32 --- 6 files changed, 297 insertions(+), 117 deletions(-) delete mode 100644 tests/json_infra/test_blockchain_tests.py delete mode 100644 tests/json_infra/test_state_tests.py diff --git a/src/ethereum_spec_tools/evm_tools/statetest/__init__.py b/src/ethereum_spec_tools/evm_tools/statetest/__init__.py index cd58cb3028..8015d43672 100644 --- a/src/ethereum_spec_tools/evm_tools/statetest/__init__.py +++ b/src/ethereum_spec_tools/evm_tools/statetest/__init__.py @@ -9,7 +9,7 @@ from copy import deepcopy from dataclasses import dataclass from io import StringIO -from typing import Any, Dict, Iterable, List, Optional, TextIO +from typing import Any, Dict, Generator, Iterable, List, Optional, TextIO from ethereum.utils.hexadecimal import hex_to_bytes @@ -35,6 +35,41 @@ class TestCase: transaction: Dict +def read_test_case( + test_file_path: str, key: str, test: Dict[str, Any] +) -> Generator[TestCase, None, None]: + """ + Given a key and a value, return a `TestCase` object. + """ + env = test["env"] + if not isinstance(env, dict): + raise TypeError("env not dict") + + pre = test["pre"] + if not isinstance(pre, dict): + raise TypeError("pre not dict") + + transaction = test["transaction"] + if not isinstance(transaction, dict): + raise TypeError("transaction not dict") + + for fork_name, content in test["post"].items(): + for idx, post in enumerate(content): + if not isinstance(post, dict): + raise TypeError(f'post["{fork_name}"] not dict') + + yield TestCase( + path=test_file_path, + key=key, + index=idx, + fork_name=fork_name, + post=post, + env=env, + pre=pre, + transaction=transaction, + ) + + def read_test_cases(test_file_path: str) -> Iterable[TestCase]: """ Given a path to a filled state test in JSON format, return all the @@ -44,33 +79,7 @@ def read_test_cases(test_file_path: str) -> Iterable[TestCase]: tests = json.load(test_file) for key, test in tests.items(): - env = test["env"] - if not isinstance(env, dict): - raise TypeError("env not dict") - - pre = test["pre"] - if not isinstance(pre, dict): - raise TypeError("pre not dict") - - transaction = test["transaction"] - if not isinstance(transaction, dict): - raise TypeError("transaction not dict") - - for fork_name, content in test["post"].items(): - for idx, post in enumerate(content): - if not isinstance(post, dict): - raise TypeError(f'post["{fork_name}"] not dict') - - yield TestCase( - path=test_file_path, - key=key, - index=idx, - fork_name=fork_name, - post=post, - env=env, - pre=pre, - transaction=transaction, - ) + yield from read_test_case(test_file_path, key, test) def run_test_case( diff --git a/tests/json_infra/conftest.py b/tests/json_infra/conftest.py index ee19715578..13f2e4f2a6 100644 --- a/tests/json_infra/conftest.py +++ b/tests/json_infra/conftest.py @@ -1,24 +1,44 @@ """Pytest configuration for the json infra tests.""" +import json import os import shutil import tarfile from pathlib import Path -from typing import Callable, Final, Optional, Set +from typing import ( + Any, + Callable, + Dict, + Final, + Generator, + 
List, + Optional, + Self, + Set, + Type, +) import git +import pytest import requests_cache from _pytest.config import Config from _pytest.config.argparsing import Parser from _pytest.nodes import Item from filelock import FileLock from git.exc import GitCommandError, InvalidGitRepositoryError -from pytest import Session, StashKey, fixture +from pytest import Collector, File, Session, StashKey, fixture from requests_cache import CachedSession from requests_cache.backends.sqlite import SQLiteCache -from typing_extensions import Self -from . import TEST_FIXTURES +from ethereum_spec_tools.evm_tools.statetest import TestCase as StateTestCase +from ethereum_spec_tools.evm_tools.statetest import ( + read_test_case as read_state_test_case, +) + +from . import FORKS, TEST_FIXTURES +from .helpers.exceptional_test_patterns import exceptional_state_test_patterns +from .helpers.load_blockchain_tests import run_blockchain_st_test +from .helpers.load_state_tests import run_state_test try: from xdist import get_xdist_worker_id @@ -272,3 +292,220 @@ def pytest_sessionfinish(session: Session, exitstatus: int) -> None: assert lock_file is not None lock_file.release() + + +def pytest_collect_file( + file_path: Path, parent: Collector +) -> Collector | None: + """ + Pytest hook that collects test cases from fixture JSON files. + """ + if file_path.suffix == ".json": + return FixturesFile.from_parent(parent, path=file_path) + return None + + +class Fixture: + """Single fixture from a JSON file.""" + + @classmethod + def is_format(cls, obj: object) -> bool: + """Return true if the object can be parsed as the fixture type.""" + raise NotImplementedError("Not implemented.") + + @classmethod + def collect( + cls, file_path: str, key: str, obj: Dict[str, Any] + ) -> Generator[Item, None, None]: + """Collect tests from a single fixture dictionary.""" + pass + + +class StateTest(Item): + """Single state test case item.""" + + test_case: StateTestCase + test_dict: Dict[str, Any] + + def __init__( + self, + *args: Any, + test_case: StateTestCase, + test_dict: Dict[str, Any], + **kwargs: Any, + ) -> None: + """Initialize a single test case item.""" + super().__init__(*args, **kwargs) + self.test_case = test_case + self.test_dict = test_dict + self.own_markers.append(pytest.mark.fork(self.test_case.fork_name)) + self.own_markers.append(pytest.mark.evm_tools) + self.own_markers.append(pytest.mark.json_state_tests) + eels_fork = FORKS[test_case.fork_name]["eels_fork"] + test_patterns = exceptional_state_test_patterns( + test_case.fork_name, eels_fork + ) + if any(x.search(test_case.key) for x in test_patterns.slow): + self.own_markers.append(pytest.mark.slow) + + def runtest(self) -> None: + """Execute the test logic for this specific static test.""" + test_case_dict = { + "test_file": self.test_case.path, + "test_key": self.test_case.key, + "index": self.test_case.index, + "json_fork": self.test_case.fork_name, + "test_dict": self.test_dict, + } + run_state_test(test_case_dict) + + +class StateTestFixture(Fixture): + """Single state test fixture from a JSON file.""" + + @classmethod + def is_format(cls, obj: object) -> bool: + """Return true if the object can be parsed as the fixture type.""" + if "env" not in obj: + return False + if "pre" not in obj: + return False + if "transaction" not in obj: + return False + if "post" not in obj: + return False + return True + + @classmethod + def collect( + cls, parent: Collector, file_path: str, key: str, obj: Dict[str, Any] + ) -> Generator[Item, None, None]: + """Collect state 
tests from a single fixture dictionary.""" + for test_case in read_state_test_case( + test_file_path=file_path, key=key, test=obj + ): + name = f"{key} - {test_case.index}" + new_item = StateTest.from_parent( + parent, + name=name, + test_case=test_case, + test_dict=obj, + ) + yield new_item + + +class BlockchainTest(Item): + """Single state test case item.""" + + test_file: str + test_key: str + fork_name: str + test_dict: Dict[str, Any] + + def __init__( + self, + *args: Any, + test_file: str, + test_key: str, + fork_name: str, + test_dict: Dict[str, Any], + **kwargs: Any, + ) -> None: + """Initialize a single test case item.""" + super().__init__(*args, **kwargs) + self.test_file = test_file + self.test_key = test_key + self.test_dict = test_dict + self.own_markers.append(pytest.mark.fork(fork_name)) + self.own_markers.append(pytest.mark.evm_tools) + self.own_markers.append(pytest.mark.json_state_tests) + eels_fork = FORKS[fork_name]["eels_fork"] + test_patterns = exceptional_state_test_patterns(fork_name, eels_fork) + _identifier = "(" + test_file + "|" + test_key + ")" + if any( + x.search(test_file) for x in test_patterns.expected_fail + ) or any(x.search(_identifier) for x in test_patterns.expected_fail): + self.own_markers.append(pytest.mark.skip("Expected to fail")) + if any(x.search(_identifier) for x in test_patterns.slow): + self.own_markers.append(pytest.mark.slow) + if any(x.search(_identifier) for x in test_patterns.big_memory): + self.own_markers.append(pytest.mark.bigmem) + + def runtest(self) -> None: + """Execute the test logic for this specific static test.""" + test_case_dict = { + "test_file": self.test_file, + "test_key": self.test_key, + "test_dict": self.test_dict, + } + run_blockchain_st_test(test_case_dict) + + +class BlockchainTestFixture(Fixture): + """Single blockchain test fixture from a JSON file.""" + + @classmethod + def is_format(cls, obj: Dict) -> bool: + """Return true if the object can be parsed as the fixture type.""" + if "genesisBlockHeader" not in obj: + return False + if "blocks" not in obj: + return False + if "engineNewPayloads" in obj: + return False + if "preHash" in obj: + return False + if "network" not in obj: + return False + return True + + @classmethod + def collect( + cls, parent: Collector, file_path: str, key: str, obj: Dict[str, Any] + ) -> Generator[Item, None, None]: + """Collect blockchain tests from a single fixture dictionary.""" + name = f"{key}" + assert "network" in obj + new_item = BlockchainTest.from_parent( + parent, + name=name, + test_file=file_path, + test_key=key, + fork_name=obj["network"], + test_dict=obj, + ) + yield new_item + + +FixtureTypes: List[Type[Fixture]] = [ + StateTestFixture, + BlockchainTestFixture, +] + + +class FixturesFile(File): + """Single JSON file containing fixtures.""" + + def collect( + self: Self, + ) -> Generator[StateTestFixture | BlockchainTestFixture, None, None]: + """Collect test cases from a single JSON fixtures file.""" + with open(self.path, "r") as file: + try: + loaded_file = json.load(file) + if not isinstance(loaded_file, dict): + return + for key, fixture_dict in loaded_file.items(): + if not isinstance(fixture_dict, dict): + continue + for fixture_type in FixtureTypes: + if not fixture_type.is_format(fixture_dict): + continue + yield from fixture_type.collect( + parent=self, + file_path=self.path, + key=key, + obj=fixture_dict, + ) + except Exception: + return diff --git a/tests/json_infra/helpers/load_blockchain_tests.py b/tests/json_infra/helpers/load_blockchain_tests.py 
index 990e941a35..d4a62ec878 100644 --- a/tests/json_infra/helpers/load_blockchain_tests.py +++ b/tests/json_infra/helpers/load_blockchain_tests.py @@ -34,10 +34,12 @@ def run_blockchain_st_test(test_case: Dict, load: Load) -> None: test_file = test_case["test_file"] test_key = test_case["test_key"] - with open(test_file, "r") as fp: - data = json.load(fp) - - json_data = data[test_key] + if "test_dict" in test_case: + json_data = test_case["test_dict"] + else: + with open(test_file, "r") as fp: + data = json.load(fp) + json_data = data[test_key] if "postState" not in json_data: pytest.xfail(f"{test_case} doesn't have post state") diff --git a/tests/json_infra/helpers/load_state_tests.py b/tests/json_infra/helpers/load_state_tests.py index 37e6813402..880510bed3 100644 --- a/tests/json_infra/helpers/load_state_tests.py +++ b/tests/json_infra/helpers/load_state_tests.py @@ -5,7 +5,7 @@ import sys from glob import glob from io import StringIO -from typing import Dict, Generator +from typing import Any, Dict, Generator import pytest @@ -71,7 +71,7 @@ def idfn(test_case: Dict) -> str: return f"{folder_name} - {test_key} - {index}" -def run_state_test(test_case: Dict[str, str]) -> None: +def run_state_test(test_case: Dict[str, str | Dict[str, Any]]) -> None: """ Runs a single general state test. """ @@ -79,26 +79,30 @@ def run_state_test(test_case: Dict[str, str]) -> None: test_key = test_case["test_key"] index = test_case["index"] json_fork = test_case["json_fork"] - with open(test_file) as f: - tests = json.load(f) - - env = tests[test_key]["env"] + if "test_dict" in test_case: + test_dict = test_case["test_dict"] + else: + with open(test_file) as f: + tests = json.load(f) + test_dict = tests[test_key] + + env = test_dict["env"] try: env["blockHashes"] = {"0": env["previousHash"]} except KeyError: env["blockHashes"] = {} env["withdrawals"] = [] - alloc = tests[test_key]["pre"] + alloc = test_dict["pre"] - post = tests[test_key]["post"][json_fork][index] + post = test_dict["post"][json_fork][index] post_hash = post["hash"] d = post["indexes"]["data"] g = post["indexes"]["gas"] v = post["indexes"]["value"] tx = {} - for k, value in tests[test_key]["transaction"].items(): + for k, value in test_dict["transaction"].items(): if k == "data": tx["input"] = value[d] elif k == "gasLimit": diff --git a/tests/json_infra/test_blockchain_tests.py b/tests/json_infra/test_blockchain_tests.py deleted file mode 100644 index 9e19a361cf..0000000000 --- a/tests/json_infra/test_blockchain_tests.py +++ /dev/null @@ -1,40 +0,0 @@ -"""Run the blockchain tests from json fixtures.""" - -from typing import Callable, Dict - -import pytest - -from . 
import FORKS -from .helpers.load_blockchain_tests import ( - Load, - fetch_blockchain_tests, - idfn, - run_blockchain_st_test, -) - - -def _generate_test_function(fork_name: str) -> Callable: - """Generates a test function for blockchain tests for a specific fork.""" - - @pytest.mark.fork(fork_name) - @pytest.mark.json_blockchain_tests - @pytest.mark.parametrize( - "blockchain_test_case", - fetch_blockchain_tests(fork_name), - ids=idfn, - ) - def test_func(blockchain_test_case: Dict) -> None: - load = Load( - blockchain_test_case["json_fork"], - blockchain_test_case["eels_fork"], - ) - run_blockchain_st_test(blockchain_test_case, load=load) - - test_func.__name__ = f"test_blockchain_tests_{fork_name.lower()}" - return test_func - - -for fork_name in FORKS.keys(): - locals()[f"test_blockchain_tests_{fork_name.lower()}"] = ( - _generate_test_function(fork_name) - ) diff --git a/tests/json_infra/test_state_tests.py b/tests/json_infra/test_state_tests.py deleted file mode 100644 index 20bb578654..0000000000 --- a/tests/json_infra/test_state_tests.py +++ /dev/null @@ -1,32 +0,0 @@ -"""Run the state tests from json fixtures.""" - -from typing import Callable, Dict - -import pytest - -from . import FORKS -from .helpers.load_state_tests import fetch_state_tests, idfn, run_state_test - - -def _generate_test_function(fork_name: str) -> Callable: - """Generates a test function for state tests for a specific fork.""" - - @pytest.mark.fork(fork_name) - @pytest.mark.evm_tools - @pytest.mark.json_state_tests - @pytest.mark.parametrize( - "state_test_case", - fetch_state_tests(fork_name), - ids=idfn, - ) - def test_func(state_test_case: Dict) -> None: - run_state_test(state_test_case) - - test_func.__name__ = f"test_state_tests_{fork_name.lower()}" - return test_func - - -for fork_name in FORKS.keys(): - locals()[f"test_state_tests_{fork_name.lower()}"] = ( - _generate_test_function(fork_name) - ) From d11c62a09d5e30d5c926c0b069b0c04588c8a4be Mon Sep 17 00:00:00 2001 From: Mario Vega Date: Thu, 23 Oct 2025 04:24:31 +0000 Subject: [PATCH 02/12] fix(tests): json collecting --- tests/json_infra/conftest.py | 52 +++++++++++++++++++++++------------- 1 file changed, 34 insertions(+), 18 deletions(-) diff --git a/tests/json_infra/conftest.py b/tests/json_infra/conftest.py index 13f2e4f2a6..5f7551720d 100644 --- a/tests/json_infra/conftest.py +++ b/tests/json_infra/conftest.py @@ -4,6 +4,7 @@ import os import shutil import tarfile +from glob import glob from pathlib import Path from typing import ( Any, @@ -36,7 +37,10 @@ ) from . import FORKS, TEST_FIXTURES -from .helpers.exceptional_test_patterns import exceptional_state_test_patterns +from .helpers.exceptional_test_patterns import ( + exceptional_blockchain_test_patterns, + exceptional_state_test_patterns, +) from .helpers.load_blockchain_tests import run_blockchain_st_test from .helpers.load_state_tests import run_state_test @@ -280,6 +284,13 @@ def pytest_sessionstart(session: Session) -> None: fixture_path, ) + # Remove any python files in the downloaded files to avoid + # importing them. 
+ for python_file in glob( + os.path.join(fixture_path, "**/*.py"), recursive=True + ): + os.unlink(python_file) + def pytest_sessionfinish(session: Session, exitstatus: int) -> None: """Clean up file locks at session finish.""" @@ -384,6 +395,8 @@ def collect( for test_case in read_state_test_case( test_file_path=file_path, key=key, test=obj ): + if test_case.fork_name not in FORKS: + continue name = f"{key} - {test_case.index}" new_item = StateTest.from_parent( parent, @@ -418,9 +431,11 @@ def __init__( self.test_dict = test_dict self.own_markers.append(pytest.mark.fork(fork_name)) self.own_markers.append(pytest.mark.evm_tools) - self.own_markers.append(pytest.mark.json_state_tests) + self.own_markers.append(pytest.mark.json_blockchain_tests) eels_fork = FORKS[fork_name]["eels_fork"] - test_patterns = exceptional_state_test_patterns(fork_name, eels_fork) + test_patterns = exceptional_blockchain_test_patterns( + fork_name, eels_fork + ) _identifier = "(" + test_file + "|" + test_key + ")" if any( x.search(test_file) for x in test_patterns.expected_fail @@ -465,7 +480,8 @@ def collect( ) -> Generator[Item, None, None]: """Collect blockchain tests from a single fixture dictionary.""" name = f"{key}" - assert "network" in obj + if "network" not in obj or obj["network"] not in FORKS: + return new_item = BlockchainTest.from_parent( parent, name=name, @@ -493,19 +509,19 @@ def collect( with open(self.path, "r") as file: try: loaded_file = json.load(file) - if not isinstance(loaded_file, dict): - return - for key, fixture_dict in loaded_file.items(): - if not isinstance(fixture_dict, dict): - continue - for fixture_type in FixtureTypes: - if not fixture_type.is_format(fixture_dict): - continue - yield from fixture_type.collect( - parent=self, - file_path=self.path, - key=key, - obj=fixture_dict, - ) except Exception: + return # Skip *.json files that are unreadable. 
+ if not isinstance(loaded_file, dict): return + for key, fixture_dict in loaded_file.items(): + if not isinstance(fixture_dict, dict): + continue + for fixture_type in FixtureTypes: + if not fixture_type.is_format(fixture_dict): + continue + yield from fixture_type.collect( + parent=self, + file_path=str(self.path), + key=key, + obj=fixture_dict, + ) From 0b6d57c07c70bd1ec98a32345c1921ef471ce5bb Mon Sep 17 00:00:00 2001 From: Mario Vega Date: Thu, 23 Oct 2025 04:50:46 +0000 Subject: [PATCH 03/12] fix(tests): blockchain test execution --- tests/json_infra/conftest.py | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/tests/json_infra/conftest.py b/tests/json_infra/conftest.py index 5f7551720d..7264850e3b 100644 --- a/tests/json_infra/conftest.py +++ b/tests/json_infra/conftest.py @@ -41,7 +41,7 @@ exceptional_blockchain_test_patterns, exceptional_state_test_patterns, ) -from .helpers.load_blockchain_tests import run_blockchain_st_test +from .helpers.load_blockchain_tests import Load, run_blockchain_st_test from .helpers.load_state_tests import run_state_test try: @@ -453,7 +453,12 @@ def runtest(self) -> None: "test_key": self.test_key, "test_dict": self.test_dict, } - run_blockchain_st_test(test_case_dict) + eels_fork = FORKS[self.fork_name]["eels_fork"] + load = Load( + self.fork_name, + eels_fork, + ) + run_blockchain_st_test(test_case_dict, load=load) class BlockchainTestFixture(Fixture): From 594193848a963123bbef623004d5e0961ad5c0c9 Mon Sep 17 00:00:00 2001 From: Mario Vega Date: Thu, 23 Oct 2025 04:53:17 +0000 Subject: [PATCH 04/12] fix(tests): blockchain test execution --- tests/json_infra/conftest.py | 1 + 1 file changed, 1 insertion(+) diff --git a/tests/json_infra/conftest.py b/tests/json_infra/conftest.py index 7264850e3b..3d6bbe353a 100644 --- a/tests/json_infra/conftest.py +++ b/tests/json_infra/conftest.py @@ -429,6 +429,7 @@ def __init__( self.test_file = test_file self.test_key = test_key self.test_dict = test_dict + self.fork_name = fork_name self.own_markers.append(pytest.mark.fork(fork_name)) self.own_markers.append(pytest.mark.evm_tools) self.own_markers.append(pytest.mark.json_blockchain_tests) From bb9c70c156c6db03a92647cc5bc2903314d34582 Mon Sep 17 00:00:00 2001 From: Mario Vega Date: Thu, 23 Oct 2025 17:55:25 +0000 Subject: [PATCH 05/12] refactor(tests): Refactor types in json_infra --- tests/json_infra/conftest.py | 230 +------------- tests/json_infra/helpers/__init__.py | 13 + .../helpers/exceptional_test_patterns.py | 3 + tests/json_infra/helpers/fixtures.py | 49 +++ .../helpers/load_blockchain_tests.py | 299 ++++++++---------- tests/json_infra/helpers/load_state_tests.py | 261 +++++++-------- 6 files changed, 347 insertions(+), 508 deletions(-) create mode 100644 tests/json_infra/helpers/fixtures.py diff --git a/tests/json_infra/conftest.py b/tests/json_infra/conftest.py index 3d6bbe353a..0cc080b4c8 100644 --- a/tests/json_infra/conftest.py +++ b/tests/json_infra/conftest.py @@ -7,20 +7,15 @@ from glob import glob from pathlib import Path from typing import ( - Any, Callable, - Dict, Final, Generator, - List, Optional, Self, Set, - Type, ) import git -import pytest import requests_cache from _pytest.config import Config from _pytest.config.argparsing import Parser @@ -31,18 +26,8 @@ from requests_cache import CachedSession from requests_cache.backends.sqlite import SQLiteCache -from ethereum_spec_tools.evm_tools.statetest import TestCase as StateTestCase -from ethereum_spec_tools.evm_tools.statetest import ( - read_test_case as 
read_state_test_case, -) - -from . import FORKS, TEST_FIXTURES -from .helpers.exceptional_test_patterns import ( - exceptional_blockchain_test_patterns, - exceptional_state_test_patterns, -) -from .helpers.load_blockchain_tests import Load, run_blockchain_st_test -from .helpers.load_state_tests import run_state_test +from . import TEST_FIXTURES +from .helpers import ALL_FIXTURE_TYPES try: from xdist import get_xdist_worker_id @@ -316,201 +301,12 @@ def pytest_collect_file( return None -class Fixture: - """Single fixture from a JSON file.""" - - @classmethod - def is_format(cls, obj: object) -> bool: - """Return true if the object can be parsed as the fixture type.""" - raise NotImplementedError("Not implemented.") - - @classmethod - def collect( - cls, file_path: str, key: str, obj: Dict[str, Any] - ) -> Generator[Item, None, None]: - """Collect tests from a single fixture dictionary.""" - pass - - -class StateTest(Item): - """Single state test case item.""" - - test_case: StateTestCase - test_dict: Dict[str, Any] - - def __init__( - self, - *args: Any, - test_case: StateTestCase, - test_dict: Dict[str, Any], - **kwargs: Any, - ) -> None: - """Initialize a single test case item.""" - super().__init__(*args, **kwargs) - self.test_case = test_case - self.test_dict = test_dict - self.own_markers.append(pytest.mark.fork(self.test_case.fork_name)) - self.own_markers.append(pytest.mark.evm_tools) - self.own_markers.append(pytest.mark.json_state_tests) - eels_fork = FORKS[test_case.fork_name]["eels_fork"] - test_patterns = exceptional_state_test_patterns( - test_case.fork_name, eels_fork - ) - if any(x.search(test_case.key) for x in test_patterns.slow): - self.own_markers.append(pytest.mark.slow) - - def runtest(self) -> None: - """Execute the test logic for this specific static test.""" - test_case_dict = { - "test_file": self.test_case.path, - "test_key": self.test_case.key, - "index": self.test_case.index, - "json_fork": self.test_case.fork_name, - "test_dict": self.test_dict, - } - run_state_test(test_case_dict) - - -class StateTestFixture(Fixture): - """Single state test fixture from a JSON file.""" - - @classmethod - def is_format(cls, obj: object) -> bool: - """Return true if the object can be parsed as the fixture type.""" - if "env" not in obj: - return False - if "pre" not in obj: - return False - if "transaction" not in obj: - return False - if "post" not in obj: - return False - return True - - @classmethod - def collect( - cls, parent: Collector, file_path: str, key: str, obj: Dict[str, Any] - ) -> Generator[Item, None, None]: - """Collect state tests from a single fixture dictionary.""" - for test_case in read_state_test_case( - test_file_path=file_path, key=key, test=obj - ): - if test_case.fork_name not in FORKS: - continue - name = f"{key} - {test_case.index}" - new_item = StateTest.from_parent( - parent, - name=name, - test_case=test_case, - test_dict=obj, - ) - yield new_item - - -class BlockchainTest(Item): - """Single state test case item.""" - - test_file: str - test_key: str - fork_name: str - test_dict: Dict[str, Any] - - def __init__( - self, - *args: Any, - test_file: str, - test_key: str, - fork_name: str, - test_dict: Dict[str, Any], - **kwargs: Any, - ) -> None: - """Initialize a single test case item.""" - super().__init__(*args, **kwargs) - self.test_file = test_file - self.test_key = test_key - self.test_dict = test_dict - self.fork_name = fork_name - self.own_markers.append(pytest.mark.fork(fork_name)) - self.own_markers.append(pytest.mark.evm_tools) - 
self.own_markers.append(pytest.mark.json_blockchain_tests) - eels_fork = FORKS[fork_name]["eels_fork"] - test_patterns = exceptional_blockchain_test_patterns( - fork_name, eels_fork - ) - _identifier = "(" + test_file + "|" + test_key + ")" - if any( - x.search(test_file) for x in test_patterns.expected_fail - ) or any(x.search(_identifier) for x in test_patterns.expected_fail): - self.own_markers.append(pytest.mark.skip("Expected to fail")) - if any(x.search(_identifier) for x in test_patterns.slow): - self.own_markers.append(pytest.mark.slow) - if any(x.search(_identifier) for x in test_patterns.big_memory): - self.own_markers.append(pytest.mark.bigmem) - - def runtest(self) -> None: - """Execute the test logic for this specific static test.""" - test_case_dict = { - "test_file": self.test_file, - "test_key": self.test_key, - "test_dict": self.test_dict, - } - eels_fork = FORKS[self.fork_name]["eels_fork"] - load = Load( - self.fork_name, - eels_fork, - ) - run_blockchain_st_test(test_case_dict, load=load) - - -class BlockchainTestFixture(Fixture): - """Single blockchain test fixture from a JSON file.""" - - @classmethod - def is_format(cls, obj: Dict) -> bool: - """Return true if the object can be parsed as the fixture type.""" - if "genesisBlockHeader" not in obj: - return False - if "blocks" not in obj: - return False - if "engineNewPayloads" in obj: - return False - if "preHash" in obj: - return False - if "network" not in obj: - return False - return True - - @classmethod - def collect( - cls, parent: Collector, file_path: str, key: str, obj: Dict[str, Any] - ) -> Generator[Item, None, None]: - """Collect blockchain tests from a single fixture dictionary.""" - name = f"{key}" - if "network" not in obj or obj["network"] not in FORKS: - return - new_item = BlockchainTest.from_parent( - parent, - name=name, - test_file=file_path, - test_key=key, - fork_name=obj["network"], - test_dict=obj, - ) - yield new_item - - -FixtureTypes: List[Type[Fixture]] = [ - StateTestFixture, - BlockchainTestFixture, -] - - class FixturesFile(File): """Single JSON file containing fixtures.""" def collect( self: Self, - ) -> Generator[StateTestFixture | BlockchainTestFixture, None, None]: + ) -> Generator[Item | Collector, None, None]: """Collect test cases from a single JSON fixtures file.""" with open(self.path, "r") as file: try: @@ -519,15 +315,19 @@ def collect( return # Skip *.json files that are unreadable. 
if not isinstance(loaded_file, dict): return - for key, fixture_dict in loaded_file.items(): - if not isinstance(fixture_dict, dict): + for key, test_dict in loaded_file.items(): + if not isinstance(test_dict, dict): continue - for fixture_type in FixtureTypes: - if not fixture_type.is_format(fixture_dict): + for fixture_type in ALL_FIXTURE_TYPES: + if not fixture_type.is_format(test_dict): continue - yield from fixture_type.collect( + name = key + if "::" in name: + name = name.split("::")[1] + yield fixture_type.from_parent( # type: ignore parent=self, - file_path=str(self.path), - key=key, - obj=fixture_dict, + name=name, + test_file=str(self.path), + test_key=key, + test_dict=test_dict, ) diff --git a/tests/json_infra/helpers/__init__.py b/tests/json_infra/helpers/__init__.py index 3214c2cc14..7791d9803c 100644 --- a/tests/json_infra/helpers/__init__.py +++ b/tests/json_infra/helpers/__init__.py @@ -1 +1,14 @@ """Helpers to load tests from JSON files.""" + +from typing import List, Type + +from .fixtures import Fixture +from .load_blockchain_tests import BlockchainTestFixture +from .load_state_tests import StateTestFixture + +ALL_FIXTURE_TYPES: List[Type[Fixture]] = [ + BlockchainTestFixture, + StateTestFixture, +] + +__all__ = ["ALL_FIXTURE_TYPES", "Fixture"] diff --git a/tests/json_infra/helpers/exceptional_test_patterns.py b/tests/json_infra/helpers/exceptional_test_patterns.py index cebb1a9ef5..6355cfbf81 100644 --- a/tests/json_infra/helpers/exceptional_test_patterns.py +++ b/tests/json_infra/helpers/exceptional_test_patterns.py @@ -5,6 +5,7 @@ import re from dataclasses import dataclass +from functools import lru_cache from typing import Pattern, Tuple @@ -20,6 +21,7 @@ class TestPatterns: big_memory: Tuple[Pattern[str], ...] +@lru_cache def exceptional_blockchain_test_patterns( json_fork: str, eels_fork: str ) -> TestPatterns: @@ -104,6 +106,7 @@ def exceptional_blockchain_test_patterns( ) +@lru_cache def exceptional_state_test_patterns( json_fork: str, eels_fork: str ) -> TestPatterns: diff --git a/tests/json_infra/helpers/fixtures.py b/tests/json_infra/helpers/fixtures.py new file mode 100644 index 0000000000..f150d33540 --- /dev/null +++ b/tests/json_infra/helpers/fixtures.py @@ -0,0 +1,49 @@ +"""Base class for all fixture loaders.""" + +from abc import ABC, abstractmethod +from typing import Any, Dict, Self + +from _pytest.nodes import Node + + +class Fixture(ABC): + """ + Single fixture from a JSON file. + + It can be subclassed in combination with Item or Collector to create a + fixture that can be collected by pytest. 
+ """ + + test_file: str + test_key: str + test_dict: Dict[str, Any] + + def __init__( + self, + *args: Any, + test_file: str, + test_key: str, + test_dict: Dict[str, Any], + **kwargs: Any, + ): + super().__init__(*args, **kwargs) + self.test_file = test_file + self.test_key = test_key + self.test_dict = test_dict + + @classmethod + def from_parent( + cls, + parent: Node, + **kwargs: Any, + ) -> Self: + """Pytest hook that returns a fixture from a JSON file.""" + return super().from_parent( # type: ignore[misc] + parent=parent, **kwargs + ) + + @classmethod + @abstractmethod + def is_format(cls, test_dict: Dict[str, Any]) -> bool: + """Return true if the object can be parsed as the fixture type.""" + pass diff --git a/tests/json_infra/helpers/load_blockchain_tests.py b/tests/json_infra/helpers/load_blockchain_tests.py index d4a62ec878..94b54bcab1 100644 --- a/tests/json_infra/helpers/load_blockchain_tests.py +++ b/tests/json_infra/helpers/load_blockchain_tests.py @@ -1,17 +1,15 @@ """Helpers to load and run blockchain tests from JSON files.""" import importlib -import json -import os.path -from glob import glob -from typing import Any, Dict, Generator +from pathlib import Path +from typing import Any, Dict, Tuple from unittest.mock import call, patch import pytest -from _pytest.mark.structures import ParameterSet from ethereum_rlp import rlp from ethereum_rlp.exceptions import RLPException from ethereum_types.numeric import U64 +from pytest import Item from ethereum.crypto.hash import keccak256 from ethereum.exceptions import EthereumException, StateWithEmptyAccount @@ -20,6 +18,7 @@ from .. import FORKS from .exceptional_test_patterns import exceptional_blockchain_test_patterns +from .fixtures import Fixture class NoTestsFoundError(Exception): @@ -29,81 +28,6 @@ class NoTestsFoundError(Exception): """ -def run_blockchain_st_test(test_case: Dict, load: Load) -> None: - """Run a blockchain state test from JSON test case data.""" - test_file = test_case["test_file"] - test_key = test_case["test_key"] - - if "test_dict" in test_case: - json_data = test_case["test_dict"] - else: - with open(test_file, "r") as fp: - data = json.load(fp) - json_data = data[test_key] - - if "postState" not in json_data: - pytest.xfail(f"{test_case} doesn't have post state") - - genesis_header = load.json_to_header(json_data["genesisBlockHeader"]) - parameters = [ - genesis_header, - (), - (), - ] - if hasattr(genesis_header, "withdrawals_root"): - parameters.append(()) - - if hasattr(genesis_header, "requests_root"): - parameters.append(()) - - genesis_block = load.fork.Block(*parameters) - - genesis_header_hash = hex_to_bytes(json_data["genesisBlockHeader"]["hash"]) - assert keccak256(rlp.encode(genesis_header)) == genesis_header_hash - genesis_rlp = hex_to_bytes(json_data["genesisRLP"]) - assert rlp.encode(genesis_block) == genesis_rlp - - try: - state = load.json_to_state(json_data["pre"]) - except StateWithEmptyAccount as e: - pytest.xfail(str(e)) - - chain = load.fork.BlockChain( - blocks=[genesis_block], - state=state, - chain_id=U64(json_data["genesisBlockHeader"].get("chainId", 1)), - ) - - mock_pow = ( - json_data["sealEngine"] == "NoProof" and not load.fork.proof_of_stake - ) - - for json_block in json_data["blocks"]: - block_exception = None - for key, value in json_block.items(): - if key.startswith("expectException"): - block_exception = value - break - - if block_exception: - # TODO: Once all the specific exception types are thrown, - # only `pytest.raises` the correct exception type instead of - # 
all of them. - with pytest.raises((EthereumException, RLPException)): - add_block_to_chain(chain, json_block, load, mock_pow) - return - else: - add_block_to_chain(chain, json_block, load, mock_pow) - - last_block_hash = hex_to_bytes(json_data["lastblockhash"]) - assert keccak256(rlp.encode(chain.blocks[-1].header)) == last_block_hash - - expected_post_state = load.json_to_state(json_data["postState"]) - assert chain.state == expected_post_state - load.fork.close_state(chain.state) - load.fork.close_state(expected_post_state) - - def add_block_to_chain( chain: Any, json_block: Any, load: Load, mock_pow: bool ) -> None: @@ -135,96 +59,133 @@ def add_block_to_chain( ) -# Functions that fetch individual test cases -def load_json_fixture(test_file: str, json_fork: str) -> Generator: - """Load test cases from a JSON fixture file for the specified fork.""" - # Extract the pure basename of the file without the path to the file. - # Ex: Extract "world.json" from "path/to/file/world.json" - # Extract the filename without the extension. Ex: Extract "world" from - # "world.json" - with open(test_file, "r") as fp: - data = json.load(fp) - - # Search tests by looking at the `network` attribute - found_keys = [] - for key, test in data.items(): - if "network" not in test: - continue - - if test["network"] == json_fork: - found_keys.append(key) - - if not any(found_keys): - raise NoTestsFoundError - - for _key in found_keys: - yield { - "test_file": test_file, - "test_key": _key, - "json_fork": json_fork, - } - - -def fetch_blockchain_tests( - json_fork: str, -) -> Generator[Dict | ParameterSet, None, None]: - """Fetch all blockchain test cases for the specified JSON fork.""" - # Filter FORKS based on fork_option parameter - eels_fork = FORKS[json_fork]["eels_fork"] - test_dirs = FORKS[json_fork]["blockchain_test_dirs"] - - test_patterns = exceptional_blockchain_test_patterns(json_fork, eels_fork) - - # Get all the files to iterate over from both eest_tests_path - # and ethereum_tests_path - all_jsons = [] - for test_dir in test_dirs: - all_jsons.extend( - glob(os.path.join(test_dir, "**/*.json"), recursive=True) +class BlockchainTestFixture(Fixture, Item): + """Single blockchain test fixture from a JSON file.""" + + fork_name: str + + def __init__( + self, + *args: Any, + **kwargs: Any, + ) -> None: + """Initialize a single blockchain test fixture from a JSON file.""" + super().__init__(*args, **kwargs) + self.fork_name = self.test_dict["network"] + self.add_marker(pytest.mark.fork(self.fork_name)) + self.add_marker("evm_tools") + self.add_marker("json_blockchain_tests") + eels_fork = FORKS[self.fork_name]["eels_fork"] + test_patterns = exceptional_blockchain_test_patterns( + self.fork_name, eels_fork ) + assert self.test_file is not None + assert self.test_key is not None + _identifier = "(" + self.test_file + "|" + self.test_key + ")" + if any( + x.search(self.test_file) for x in test_patterns.expected_fail + ) or any(x.search(_identifier) for x in test_patterns.expected_fail): + self.add_marker(pytest.mark.skip("Expected to fail")) + if any(x.search(_identifier) for x in test_patterns.slow): + self.add_marker("slow") + if any(x.search(_identifier) for x in test_patterns.big_memory): + self.add_marker("bigmem") + + def runtest(self) -> None: + """Run a blockchain state test from JSON test case data.""" + json_data = self.test_dict + if "postState" not in json_data: + pytest.xfail( + f"{self.test_file}[{self.test_key}] doesn't have post state" + ) + + eels_fork = FORKS[self.fork_name]["eels_fork"] + 
load = Load( + self.fork_name, + eels_fork, + ) + + genesis_header = load.json_to_header(json_data["genesisBlockHeader"]) + parameters = [ + genesis_header, + (), + (), + ] + if hasattr(genesis_header, "withdrawals_root"): + parameters.append(()) - files_to_iterate = [] - for full_path in all_jsons: - if not any(x.search(full_path) for x in test_patterns.expected_fail): - # If a file or folder is marked for ignore, - # it can already be dropped at this stage - files_to_iterate.append(full_path) + if hasattr(genesis_header, "requests_root"): + parameters.append(()) + + genesis_block = load.fork.Block(*parameters) + + genesis_header_hash = hex_to_bytes( + json_data["genesisBlockHeader"]["hash"] + ) + assert keccak256(rlp.encode(genesis_header)) == genesis_header_hash + genesis_rlp = hex_to_bytes(json_data["genesisRLP"]) + assert rlp.encode(genesis_block) == genesis_rlp - # Start yielding individual test cases from the file list - for _test_file in files_to_iterate: try: - for _test_case in load_json_fixture(_test_file, json_fork): - # _identifier could identify files, folders through test_file - # individual cases through test_key - _identifier = ( - "(" - + _test_case["test_file"] - + "|" - + _test_case["test_key"] - + ")" - ) - _test_case["eels_fork"] = eels_fork - if any( - x.search(_identifier) for x in test_patterns.expected_fail - ): - continue - elif any(x.search(_identifier) for x in test_patterns.slow): - yield pytest.param(_test_case, marks=pytest.mark.slow) - elif any( - x.search(_identifier) for x in test_patterns.big_memory - ): - yield pytest.param(_test_case, marks=pytest.mark.bigmem) - else: - yield _test_case - except NoTestsFoundError: - # file doesn't contain tests for the given fork - continue - - -# Test case Identifier -def idfn(test_case: Dict) -> str: - """Generate test case identifier from test case dictionary.""" - if isinstance(test_case, dict): - folder_name = test_case["test_file"].split("/")[-2] - # Assign Folder name and test_key to identify tests in output - return folder_name + " - " + test_case["test_key"] + state = load.json_to_state(json_data["pre"]) + except StateWithEmptyAccount as e: + pytest.xfail(str(e)) + + chain = load.fork.BlockChain( + blocks=[genesis_block], + state=state, + chain_id=U64(json_data["genesisBlockHeader"].get("chainId", 1)), + ) + + mock_pow = ( + json_data["sealEngine"] == "NoProof" + and not load.fork.proof_of_stake + ) + + for json_block in json_data["blocks"]: + block_exception = None + for key, value in json_block.items(): + if key.startswith("expectException"): + block_exception = value + break + + if block_exception: + # TODO: Once all the specific exception types are thrown, + # only `pytest.raises` the correct exception type instead + # of all of them. 
+ with pytest.raises((EthereumException, RLPException)): + add_block_to_chain(chain, json_block, load, mock_pow) + return + else: + add_block_to_chain(chain, json_block, load, mock_pow) + + last_block_hash = hex_to_bytes(json_data["lastblockhash"]) + assert ( + keccak256(rlp.encode(chain.blocks[-1].header)) == last_block_hash + ) + + expected_post_state = load.json_to_state(json_data["postState"]) + assert chain.state == expected_post_state + load.fork.close_state(chain.state) + load.fork.close_state(expected_post_state) + + def reportinfo(self) -> Tuple[Path, int, str]: + """Return information for test reporting.""" + return self.path, 1, self.name + + @classmethod + def is_format(cls, test_dict: Dict[str, Any]) -> bool: + """Return true if the object can be parsed as the fixture type.""" + if "genesisBlockHeader" not in test_dict: + return False + if "blocks" not in test_dict: + return False + if "engineNewPayloads" in test_dict: + return False + if "preHash" in test_dict: + return False + if "network" not in test_dict: + return False + if test_dict["network"] not in FORKS: + return False + return True diff --git a/tests/json_infra/helpers/load_state_tests.py b/tests/json_infra/helpers/load_state_tests.py index 880510bed3..21eea12df7 100644 --- a/tests/json_infra/helpers/load_state_tests.py +++ b/tests/json_infra/helpers/load_state_tests.py @@ -1,152 +1,165 @@ """Helper functions to load and run general state tests for Ethereum forks.""" import json -import os import sys -from glob import glob from io import StringIO -from typing import Any, Dict, Generator +from typing import Any, Dict, Iterable import pytest +from _pytest.nodes import Item +from pytest import Collector from ethereum.exceptions import StateWithEmptyAccount from ethereum.utils.hexadecimal import hex_to_bytes from ethereum_spec_tools.evm_tools import create_parser -from ethereum_spec_tools.evm_tools.statetest import read_test_cases +from ethereum_spec_tools.evm_tools.statetest import TestCase as StateTestCase +from ethereum_spec_tools.evm_tools.statetest import ( + read_test_case as read_state_test_case, +) from ethereum_spec_tools.evm_tools.t8n import T8N from .. import FORKS -from .exceptional_test_patterns import exceptional_state_test_patterns +from .exceptional_test_patterns import ( + exceptional_state_test_patterns, +) +from .fixtures import Fixture parser = create_parser() -def fetch_state_tests(json_fork: str) -> Generator: - """ - Fetches all the general state tests from the given directory. 
- """ - # Filter FORKS based on fork_option parameter - eels_fork = FORKS[json_fork]["eels_fork"] - test_dirs = FORKS[json_fork]["state_test_dirs"] - - test_patterns = exceptional_state_test_patterns(json_fork, eels_fork) - - # Get all the files to iterate over from both eest_tests_path - # and ethereum_tests_path - all_jsons = [] - for test_dir in test_dirs: - all_jsons.extend( - glob(os.path.join(test_dir, "**/*.json"), recursive=True) +class StateTest(Item): + """Single state test case item.""" + + test_case: StateTestCase + test_dict: Dict[str, Any] + + def __init__( + self, + *args: Any, + test_case: StateTestCase, + test_dict: Dict[str, Any], + **kwargs: Any, + ) -> None: + """Initialize a single test case item.""" + super().__init__(*args, **kwargs) + self.test_case = test_case + self.test_dict = test_dict + self.add_marker(pytest.mark.fork(self.test_case.fork_name)) + self.add_marker("evm_tools") + self.add_marker("json_state_tests") + eels_fork = FORKS[test_case.fork_name]["eels_fork"] + test_patterns = exceptional_state_test_patterns( + test_case.fork_name, eels_fork + ) + if any(x.search(test_case.key) for x in test_patterns.slow): + self.add_marker("slow") + + def runtest(self) -> None: + """ + Runs a single general state test. + """ + index = self.test_case.index + json_fork = self.test_case.fork_name + test_dict = self.test_dict + + env = test_dict["env"] + try: + env["blockHashes"] = {"0": env["previousHash"]} + except KeyError: + env["blockHashes"] = {} + env["withdrawals"] = [] + + alloc = test_dict["pre"] + + post = test_dict["post"][json_fork][index] + post_hash = post["hash"] + d = post["indexes"]["data"] + g = post["indexes"]["gas"] + v = post["indexes"]["value"] + + tx = {} + for k, value in test_dict["transaction"].items(): + if k == "data": + tx["input"] = value[d] + elif k == "gasLimit": + tx["gas"] = value[g] + elif k == "value": + tx[k] = value[v] + elif k == "accessLists": + if value[d] is not None: + tx["accessList"] = value[d] + else: + tx[k] = value + + txs = [tx] + + in_stream = StringIO( + json.dumps( + { + "env": env, + "alloc": alloc, + "txs": txs, + } + ) ) - for test_file_path in all_jsons: - test_cases = read_test_cases(test_file_path) + # Run the t8n tool + t8n_args = [ + "t8n", + "--input.alloc", + "stdin", + "--input.env", + "stdin", + "--input.txs", + "stdin", + "--state.fork", + f"{json_fork}", + "--state-test", + ] + t8n_options = parser.parse_args(t8n_args) - for test_case in test_cases: - if test_case.fork_name != json_fork: - continue + try: + t8n = T8N(t8n_options, sys.stdout, in_stream) + except StateWithEmptyAccount as e: + pytest.xfail(str(e)) - test_case_dict = { - "test_file": test_case.path, - "test_key": test_case.key, - "index": test_case.index, - "json_fork": json_fork, - } + t8n.run_state_test() - if any(x.search(test_case.key) for x in test_patterns.slow): - yield pytest.param(test_case_dict, marks=pytest.mark.slow) - else: - yield test_case_dict + assert hex_to_bytes(post_hash) == t8n.result.state_root -def idfn(test_case: Dict) -> str: +class StateTestFixture(Fixture, Collector): """ - Identify the test case. + State test fixture from a JSON file that can contain multiple test + cases. """ - if isinstance(test_case, dict): - folder_name = test_case["test_file"].split("/")[-2] - test_key = test_case["test_key"] - index = test_case["index"] - return f"{folder_name} - {test_key} - {index}" - - -def run_state_test(test_case: Dict[str, str | Dict[str, Any]]) -> None: - """ - Runs a single general state test. 
- """ - test_file = test_case["test_file"] - test_key = test_case["test_key"] - index = test_case["index"] - json_fork = test_case["json_fork"] - if "test_dict" in test_case: - test_dict = test_case["test_dict"] - else: - with open(test_file) as f: - tests = json.load(f) - test_dict = tests[test_key] - - env = test_dict["env"] - try: - env["blockHashes"] = {"0": env["previousHash"]} - except KeyError: - env["blockHashes"] = {} - env["withdrawals"] = [] - - alloc = test_dict["pre"] - - post = test_dict["post"][json_fork][index] - post_hash = post["hash"] - d = post["indexes"]["data"] - g = post["indexes"]["gas"] - v = post["indexes"]["value"] - - tx = {} - for k, value in test_dict["transaction"].items(): - if k == "data": - tx["input"] = value[d] - elif k == "gasLimit": - tx["gas"] = value[g] - elif k == "value": - tx[k] = value[v] - elif k == "accessLists": - if value[d] is not None: - tx["accessList"] = value[d] - else: - tx[k] = value - - txs = [tx] - - in_stream = StringIO( - json.dumps( - { - "env": env, - "alloc": alloc, - "txs": txs, - } - ) - ) - - # Run the t8n tool - t8n_args = [ - "t8n", - "--input.alloc", - "stdin", - "--input.env", - "stdin", - "--input.txs", - "stdin", - "--state.fork", - f"{json_fork}", - "--state-test", - ] - t8n_options = parser.parse_args(t8n_args) - - try: - t8n = T8N(t8n_options, sys.stdout, in_stream) - except StateWithEmptyAccount as e: - pytest.xfail(str(e)) - - t8n.run_state_test() - - assert hex_to_bytes(post_hash) == t8n.result.state_root + @classmethod + def is_format(cls, test_dict: Dict[str, Any]) -> bool: + """Return true if the object can be parsed as the fixture type.""" + if "env" not in test_dict: + return False + if "pre" not in test_dict: + return False + if "transaction" not in test_dict: + return False + if "post" not in test_dict: + return False + return True + + def collect(self) -> Iterable[Item | Collector]: + """Collect state test cases inside of this fixture.""" + for test_case in read_state_test_case( + test_file_path=self.test_file, + key=self.test_key, + test=self.test_dict, + ): + if test_case.fork_name not in FORKS: + continue + name = f"{test_case.index}" + yield StateTest.from_parent( + parent=self, + name=name, + test_case=test_case, + test_dict=self.test_dict, + ) From 5e9663e937d66e5f6dccc434c69e216765628d96 Mon Sep 17 00:00:00 2001 From: Mario Vega Date: Thu, 23 Oct 2025 18:09:45 +0000 Subject: [PATCH 06/12] fix(tests): json_infra, imports, parse `exceptions` in some tests --- tests/json_infra/helpers/load_blockchain_tests.py | 3 +++ tests/json_infra/helpers/load_state_tests.py | 11 ++++------- 2 files changed, 7 insertions(+), 7 deletions(-) diff --git a/tests/json_infra/helpers/load_blockchain_tests.py b/tests/json_infra/helpers/load_blockchain_tests.py index 94b54bcab1..d312eae2e6 100644 --- a/tests/json_infra/helpers/load_blockchain_tests.py +++ b/tests/json_infra/helpers/load_blockchain_tests.py @@ -148,6 +148,9 @@ def runtest(self) -> None: if key.startswith("expectException"): block_exception = value break + if key == "exceptions": + block_exception = value + break if block_exception: # TODO: Once all the specific exception types are thrown, diff --git a/tests/json_infra/helpers/load_state_tests.py b/tests/json_infra/helpers/load_state_tests.py index 21eea12df7..ac9532b1e6 100644 --- a/tests/json_infra/helpers/load_state_tests.py +++ b/tests/json_infra/helpers/load_state_tests.py @@ -12,10 +12,7 @@ from ethereum.exceptions import StateWithEmptyAccount from ethereum.utils.hexadecimal import hex_to_bytes from 
ethereum_spec_tools.evm_tools import create_parser -from ethereum_spec_tools.evm_tools.statetest import TestCase as StateTestCase -from ethereum_spec_tools.evm_tools.statetest import ( - read_test_case as read_state_test_case, -) +from ethereum_spec_tools.evm_tools.statetest import TestCase, read_test_case from ethereum_spec_tools.evm_tools.t8n import T8N from .. import FORKS @@ -30,13 +27,13 @@ class StateTest(Item): """Single state test case item.""" - test_case: StateTestCase + test_case: TestCase test_dict: Dict[str, Any] def __init__( self, *args: Any, - test_case: StateTestCase, + test_case: TestCase, test_dict: Dict[str, Any], **kwargs: Any, ) -> None: @@ -149,7 +146,7 @@ def is_format(cls, test_dict: Dict[str, Any]) -> bool: def collect(self) -> Iterable[Item | Collector]: """Collect state test cases inside of this fixture.""" - for test_case in read_state_test_case( + for test_case in read_test_case( test_file_path=self.test_file, key=self.test_key, test=self.test_dict, From 226f22cc40bbfaff04b5341c5c3bedc36499f947 Mon Sep 17 00:00:00 2001 From: Mario Vega Date: Thu, 23 Oct 2025 18:18:36 +0000 Subject: [PATCH 07/12] refactor(tests): move some definitions --- tests/json_infra/conftest.py | 38 ++------------------------- tests/json_infra/helpers/__init__.py | 12 +++------ tests/json_infra/helpers/fixtures.py | 39 +++++++++++++++++++++++++++- 3 files changed, 44 insertions(+), 45 deletions(-) diff --git a/tests/json_infra/conftest.py b/tests/json_infra/conftest.py index 0cc080b4c8..6c64f33d26 100644 --- a/tests/json_infra/conftest.py +++ b/tests/json_infra/conftest.py @@ -1,6 +1,5 @@ """Pytest configuration for the json infra tests.""" -import json import os import shutil import tarfile @@ -9,7 +8,6 @@ from typing import ( Callable, Final, - Generator, Optional, Self, Set, @@ -22,12 +20,12 @@ from _pytest.nodes import Item from filelock import FileLock from git.exc import GitCommandError, InvalidGitRepositoryError -from pytest import Collector, File, Session, StashKey, fixture +from pytest import Collector, Session, StashKey, fixture from requests_cache import CachedSession from requests_cache.backends.sqlite import SQLiteCache from . import TEST_FIXTURES -from .helpers import ALL_FIXTURE_TYPES +from .helpers import FixturesFile try: from xdist import get_xdist_worker_id @@ -299,35 +297,3 @@ def pytest_collect_file( if file_path.suffix == ".json": return FixturesFile.from_parent(parent, path=file_path) return None - - -class FixturesFile(File): - """Single JSON file containing fixtures.""" - - def collect( - self: Self, - ) -> Generator[Item | Collector, None, None]: - """Collect test cases from a single JSON fixtures file.""" - with open(self.path, "r") as file: - try: - loaded_file = json.load(file) - except Exception: - return # Skip *.json files that are unreadable. 
- if not isinstance(loaded_file, dict): - return - for key, test_dict in loaded_file.items(): - if not isinstance(test_dict, dict): - continue - for fixture_type in ALL_FIXTURE_TYPES: - if not fixture_type.is_format(test_dict): - continue - name = key - if "::" in name: - name = name.split("::")[1] - yield fixture_type.from_parent( # type: ignore - parent=self, - name=name, - test_file=str(self.path), - test_key=key, - test_dict=test_dict, - ) diff --git a/tests/json_infra/helpers/__init__.py b/tests/json_infra/helpers/__init__.py index 7791d9803c..a7f548ace4 100644 --- a/tests/json_infra/helpers/__init__.py +++ b/tests/json_infra/helpers/__init__.py @@ -1,14 +1,10 @@ """Helpers to load tests from JSON files.""" -from typing import List, Type - -from .fixtures import Fixture +from .fixtures import ALL_FIXTURE_TYPES, Fixture, FixturesFile from .load_blockchain_tests import BlockchainTestFixture from .load_state_tests import StateTestFixture -ALL_FIXTURE_TYPES: List[Type[Fixture]] = [ - BlockchainTestFixture, - StateTestFixture, -] +ALL_FIXTURE_TYPES.append(BlockchainTestFixture) +ALL_FIXTURE_TYPES.append(StateTestFixture) -__all__ = ["ALL_FIXTURE_TYPES", "Fixture"] +__all__ = ["ALL_FIXTURE_TYPES", "Fixture", "FixturesFile"] diff --git a/tests/json_infra/helpers/fixtures.py b/tests/json_infra/helpers/fixtures.py index f150d33540..2f2587c76d 100644 --- a/tests/json_infra/helpers/fixtures.py +++ b/tests/json_infra/helpers/fixtures.py @@ -1,9 +1,11 @@ """Base class for all fixture loaders.""" +import json from abc import ABC, abstractmethod -from typing import Any, Dict, Self +from typing import Any, Dict, Generator, List, Self, Type from _pytest.nodes import Node +from pytest import Collector, File, Item class Fixture(ABC): @@ -47,3 +49,38 @@ def from_parent( def is_format(cls, test_dict: Dict[str, Any]) -> bool: """Return true if the object can be parsed as the fixture type.""" pass + + +ALL_FIXTURE_TYPES: List[Type[Fixture]] = [] + + +class FixturesFile(File): + """Single JSON file containing fixtures.""" + + def collect( + self: Self, + ) -> Generator[Item | Collector, None, None]: + """Collect test cases from a single JSON fixtures file.""" + with open(self.path, "r") as file: + try: + loaded_file = json.load(file) + except Exception: + return # Skip *.json files that are unreadable. 
+ if not isinstance(loaded_file, dict): + return + for key, test_dict in loaded_file.items(): + if not isinstance(test_dict, dict): + continue + for fixture_type in ALL_FIXTURE_TYPES: + if not fixture_type.is_format(test_dict): + continue + name = key + if "::" in name: + name = name.split("::")[1] + yield fixture_type.from_parent( # type: ignore + parent=self, + name=name, + test_file=str(self.path), + test_key=key, + test_dict=test_dict, + ) From fac94338e16712990aba3e1802f72c305b8df090 Mon Sep 17 00:00:00 2001 From: Mario Vega Date: Thu, 23 Oct 2025 18:26:16 +0000 Subject: [PATCH 08/12] fix(tox.ini): Remove `--ignore-glob` --- tox.ini | 2 -- 1 file changed, 2 deletions(-) diff --git a/tox.ini b/tox.ini index e52433e286..3358c439e9 100644 --- a/tox.ini +++ b/tox.ini @@ -55,7 +55,6 @@ commands = --cov-report "xml:{toxworkdir}/coverage.xml" \ --no-cov-on-fail \ --cov-branch \ - --ignore-glob='tests/json_infra/fixtures/*' \ --basetemp="{temp_dir}/pytest" \ tests/json_infra @@ -100,7 +99,6 @@ commands = pytest \ -m "not slow and not evm_tools" \ -n auto --maxprocesses 5 \ - --ignore-glob='tests/json_infra/fixtures/*' \ --ignore-glob='tests/test_t8n.py' \ --ignore-glob='eest_tests/*' \ --basetemp="{temp_dir}/pytest" \ From b7f14c1e927d12178713a4156f7463ba7627c44e Mon Sep 17 00:00:00 2001 From: Mario Vega Date: Thu, 23 Oct 2025 18:40:29 +0000 Subject: [PATCH 09/12] fix(tests): workaround for FileNotFoundError --- tests/json_infra/conftest.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/tests/json_infra/conftest.py b/tests/json_infra/conftest.py index 6c64f33d26..2ac187718d 100644 --- a/tests/json_infra/conftest.py +++ b/tests/json_infra/conftest.py @@ -272,7 +272,11 @@ def pytest_sessionstart(session: Session) -> None: for python_file in glob( os.path.join(fixture_path, "**/*.py"), recursive=True ): - os.unlink(python_file) + try: + os.unlink(python_file) + except FileNotFoundError: + # Not breaking error, another process deleted it first + pass def pytest_sessionfinish(session: Session, exitstatus: int) -> None: From f955121b0328c42c5e19d3e4eecb8c1a9df3f125 Mon Sep 17 00:00:00 2001 From: Mario Vega Date: Thu, 23 Oct 2025 22:54:13 +0000 Subject: [PATCH 10/12] fix(tests): revamp cache fix(tests): Don't cache fixtures Try to implement cache Fix caching feat(tests): Manage cache during execution --- tests/json_infra/conftest.py | 15 +++- tests/json_infra/helpers/__init__.py | 4 +- .../helpers/exceptional_test_patterns.py | 3 - tests/json_infra/helpers/fixtures.py | 70 +++++++++++------ .../helpers/load_blockchain_tests.py | 19 ++++- tests/json_infra/helpers/load_state_tests.py | 77 ++++++++++++++----- 6 files changed, 134 insertions(+), 54 deletions(-) diff --git a/tests/json_infra/conftest.py b/tests/json_infra/conftest.py index 2ac187718d..c89f4711d9 100644 --- a/tests/json_infra/conftest.py +++ b/tests/json_infra/conftest.py @@ -25,7 +25,7 @@ from requests_cache.backends.sqlite import SQLiteCache from . import TEST_FIXTURES -from .helpers import FixturesFile +from .helpers import FixturesFile, FixtureTestItem try: from xdist import get_xdist_worker_id @@ -301,3 +301,16 @@ def pytest_collect_file( if file_path.suffix == ".json": return FixturesFile.from_parent(parent, path=file_path) return None + + +def pytest_runtest_teardown(item: Item, nextitem: Item) -> None: + """ + Drop cache from a `FixtureTestItem` if the next one is not of the + same type or does not belong to the same fixtures file. 
+ """ + if isinstance(item, FixtureTestItem): + if not isinstance(nextitem, FixtureTestItem): + item.fixtures_file.clear_data_cache() + else: + if item.fixtures_file != nextitem.fixtures_file: + item.fixtures_file.clear_data_cache() diff --git a/tests/json_infra/helpers/__init__.py b/tests/json_infra/helpers/__init__.py index a7f548ace4..2980c854e2 100644 --- a/tests/json_infra/helpers/__init__.py +++ b/tests/json_infra/helpers/__init__.py @@ -1,10 +1,10 @@ """Helpers to load tests from JSON files.""" -from .fixtures import ALL_FIXTURE_TYPES, Fixture, FixturesFile +from .fixtures import ALL_FIXTURE_TYPES, Fixture, FixturesFile, FixtureTestItem from .load_blockchain_tests import BlockchainTestFixture from .load_state_tests import StateTestFixture ALL_FIXTURE_TYPES.append(BlockchainTestFixture) ALL_FIXTURE_TYPES.append(StateTestFixture) -__all__ = ["ALL_FIXTURE_TYPES", "Fixture", "FixturesFile"] +__all__ = ["ALL_FIXTURE_TYPES", "Fixture", "FixturesFile", "FixtureTestItem"] diff --git a/tests/json_infra/helpers/exceptional_test_patterns.py b/tests/json_infra/helpers/exceptional_test_patterns.py index 6355cfbf81..cebb1a9ef5 100644 --- a/tests/json_infra/helpers/exceptional_test_patterns.py +++ b/tests/json_infra/helpers/exceptional_test_patterns.py @@ -5,7 +5,6 @@ import re from dataclasses import dataclass -from functools import lru_cache from typing import Pattern, Tuple @@ -21,7 +20,6 @@ class TestPatterns: big_memory: Tuple[Pattern[str], ...] -@lru_cache def exceptional_blockchain_test_patterns( json_fork: str, eels_fork: str ) -> TestPatterns: @@ -106,7 +104,6 @@ def exceptional_blockchain_test_patterns( ) -@lru_cache def exceptional_state_test_patterns( json_fork: str, eels_fork: str ) -> TestPatterns: diff --git a/tests/json_infra/helpers/fixtures.py b/tests/json_infra/helpers/fixtures.py index 2f2587c76d..701dbee2e8 100644 --- a/tests/json_infra/helpers/fixtures.py +++ b/tests/json_infra/helpers/fixtures.py @@ -2,12 +2,24 @@ import json from abc import ABC, abstractmethod +from functools import cached_property from typing import Any, Dict, Generator, List, Self, Type from _pytest.nodes import Node from pytest import Collector, File, Item +class FixtureTestItem(Item): + """ + Test item that comes from a fixture file. + """ + + @property + def fixtures_file(self) -> "FixturesFile": + """Return the fixtures file from which the test was extracted.""" + raise NotImplementedError() + + class Fixture(ABC): """ Single fixture from a JSON file. 
@@ -18,20 +30,17 @@ class Fixture(ABC): test_file: str test_key: str - test_dict: Dict[str, Any] def __init__( self, *args: Any, test_file: str, test_key: str, - test_dict: Dict[str, Any], **kwargs: Any, ): super().__init__(*args, **kwargs) self.test_file = test_file self.test_key = test_key - self.test_dict = test_dict @classmethod def from_parent( @@ -57,30 +66,41 @@ def is_format(cls, test_dict: Dict[str, Any]) -> bool: class FixturesFile(File): """Single JSON file containing fixtures.""" + @cached_property + def data(self) -> Dict[str, Any]: + """Return the JSON data of the full file.""" + # loaded once per worker per file (thanks to cached_property) + with self.fspath.open("r", encoding="utf-8") as f: + return json.load(f) + + def clear_data_cache(self) -> None: + """Drop the data cache.""" + del self.data + def collect( self: Self, ) -> Generator[Item | Collector, None, None]: """Collect test cases from a single JSON fixtures file.""" - with open(self.path, "r") as file: - try: - loaded_file = json.load(file) - except Exception: - return # Skip *.json files that are unreadable. - if not isinstance(loaded_file, dict): - return - for key, test_dict in loaded_file.items(): - if not isinstance(test_dict, dict): + try: + loaded_file = self.data + except Exception: + return # Skip *.json files that are unreadable. + if not isinstance(loaded_file, dict): + return + for key, test_dict in loaded_file.items(): + if not isinstance(test_dict, dict): + continue + for fixture_type in ALL_FIXTURE_TYPES: + if not fixture_type.is_format(test_dict): continue - for fixture_type in ALL_FIXTURE_TYPES: - if not fixture_type.is_format(test_dict): - continue - name = key - if "::" in name: - name = name.split("::")[1] - yield fixture_type.from_parent( # type: ignore - parent=self, - name=name, - test_file=str(self.path), - test_key=key, - test_dict=test_dict, - ) + name = key + if "::" in name: + name = name.split("::")[1] + yield fixture_type.from_parent( # type: ignore + parent=self, + name=name, + test_file=str(self.path), + test_key=key, + ) + # Make sure we don't keep anything from collection in memory. + self.clear_data_cache() diff --git a/tests/json_infra/helpers/load_blockchain_tests.py b/tests/json_infra/helpers/load_blockchain_tests.py index d312eae2e6..5ee4695d1e 100644 --- a/tests/json_infra/helpers/load_blockchain_tests.py +++ b/tests/json_infra/helpers/load_blockchain_tests.py @@ -9,7 +9,6 @@ from ethereum_rlp import rlp from ethereum_rlp.exceptions import RLPException from ethereum_types.numeric import U64 -from pytest import Item from ethereum.crypto.hash import keccak256 from ethereum.exceptions import EthereumException, StateWithEmptyAccount @@ -18,7 +17,7 @@ from .. 
import FORKS from .exceptional_test_patterns import exceptional_blockchain_test_patterns -from .fixtures import Fixture +from .fixtures import Fixture, FixturesFile, FixtureTestItem class NoTestsFoundError(Exception): @@ -59,7 +58,7 @@ def add_block_to_chain( ) -class BlockchainTestFixture(Fixture, Item): +class BlockchainTestFixture(Fixture, FixtureTestItem): """Single blockchain test fixture from a JSON file.""" fork_name: str @@ -91,6 +90,20 @@ def __init__( if any(x.search(_identifier) for x in test_patterns.big_memory): self.add_marker("bigmem") + @property + def fixtures_file(self) -> FixturesFile: + """Fixtures file from which the test fixture was collected.""" + parent = self.parent + assert parent is not None + assert isinstance(parent, FixturesFile) + return parent + + @property + def test_dict(self) -> Dict[str, Any]: + """Load test from disk.""" + loaded_file = self.fixtures_file.data + return loaded_file[self.test_key] + def runtest(self) -> None: """Run a blockchain state test from JSON test case data.""" json_data = self.test_dict diff --git a/tests/json_infra/helpers/load_state_tests.py b/tests/json_infra/helpers/load_state_tests.py index ac9532b1e6..dece8307eb 100644 --- a/tests/json_infra/helpers/load_state_tests.py +++ b/tests/json_infra/helpers/load_state_tests.py @@ -12,51 +12,73 @@ from ethereum.exceptions import StateWithEmptyAccount from ethereum.utils.hexadecimal import hex_to_bytes from ethereum_spec_tools.evm_tools import create_parser -from ethereum_spec_tools.evm_tools.statetest import TestCase, read_test_case +from ethereum_spec_tools.evm_tools.statetest import read_test_case from ethereum_spec_tools.evm_tools.t8n import T8N from .. import FORKS from .exceptional_test_patterns import ( exceptional_state_test_patterns, ) -from .fixtures import Fixture +from .fixtures import Fixture, FixturesFile, FixtureTestItem parser = create_parser() -class StateTest(Item): +class StateTest(FixtureTestItem): """Single state test case item.""" - test_case: TestCase - test_dict: Dict[str, Any] + index: int + fork_name: str def __init__( self, *args: Any, - test_case: TestCase, - test_dict: Dict[str, Any], + index: int, + fork_name: str, + key: str, **kwargs: Any, ) -> None: """Initialize a single test case item.""" super().__init__(*args, **kwargs) - self.test_case = test_case - self.test_dict = test_dict - self.add_marker(pytest.mark.fork(self.test_case.fork_name)) + self.index = index + self.fork_name = fork_name + self.add_marker(pytest.mark.fork(self.fork_name)) self.add_marker("evm_tools") self.add_marker("json_state_tests") - eels_fork = FORKS[test_case.fork_name]["eels_fork"] - test_patterns = exceptional_state_test_patterns( - test_case.fork_name, eels_fork - ) - if any(x.search(test_case.key) for x in test_patterns.slow): + eels_fork = FORKS[fork_name]["eels_fork"] + test_patterns = exceptional_state_test_patterns(fork_name, eels_fork) + if any(x.search(key) for x in test_patterns.slow): self.add_marker("slow") + @property + def state_test_fixture(self) -> "StateTestFixture": + """Return the state test fixture this test belongs to.""" + parent = self.parent + assert parent is not None + assert isinstance(parent, StateTestFixture) + return parent + + @property + def test_key(self) -> str: + """Return the key of the state test fixture in the fixture file.""" + return self.state_test_fixture.test_key + + @property + def fixtures_file(self) -> FixturesFile: + """Fixtures file from which the test fixture was collected.""" + return self.state_test_fixture.fixtures_file + + 
@property + def test_dict(self) -> Dict[str, Any]: + """Load test from disk.""" + loaded_file = self.fixtures_file.data + return loaded_file[self.test_key] + def runtest(self) -> None: """ Runs a single general state test. """ - index = self.test_case.index - json_fork = self.test_case.fork_name + json_fork = self.fork_name test_dict = self.test_dict env = test_dict["env"] @@ -68,7 +90,7 @@ def runtest(self) -> None: alloc = test_dict["pre"] - post = test_dict["post"][json_fork][index] + post = test_dict["post"][self.fork_name][self.index] post_hash = post["hash"] d = post["indexes"]["data"] g = post["indexes"]["gas"] @@ -144,6 +166,20 @@ def is_format(cls, test_dict: Dict[str, Any]) -> bool: return False return True + @property + def fixtures_file(self) -> FixturesFile: + """Fixtures file from which the test fixture was collected.""" + parent = self.parent + assert parent is not None + assert isinstance(parent, FixturesFile) + return parent + + @property + def test_dict(self) -> Dict[str, Any]: + """Load test from disk.""" + loaded_file = self.fixtures_file.data + return loaded_file[self.test_key] + def collect(self) -> Iterable[Item | Collector]: """Collect state test cases inside of this fixture.""" for test_case in read_test_case( @@ -157,6 +193,7 @@ def collect(self) -> Iterable[Item | Collector]: yield StateTest.from_parent( parent=self, name=name, - test_case=test_case, - test_dict=self.test_dict, + index=test_case.index, + fork_name=test_case.fork_name, + key=self.test_key, ) From dde753239e907e8a4d95bd6af0a7933f6d263ca8 Mon Sep 17 00:00:00 2001 From: Mario Vega Date: Sat, 1 Nov 2025 00:06:00 +0000 Subject: [PATCH 11/12] fix(tox): Use `--dist=loadfile` --- tox.ini | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tox.ini b/tox.ini index 3358c439e9..14e3019322 100644 --- a/tox.ini +++ b/tox.ini @@ -48,7 +48,7 @@ commands = commands = pytest \ -m "not slow" \ - -n auto --maxprocesses 6 \ + -n auto --maxprocesses 6 --dist=loadfile \ --cov-config=pyproject.toml \ --cov=ethereum \ --cov-report=term \ @@ -98,7 +98,7 @@ passenv = commands = pytest \ -m "not slow and not evm_tools" \ - -n auto --maxprocesses 5 \ + -n auto --maxprocesses 5 --dist=loadfile \ --ignore-glob='tests/test_t8n.py' \ --ignore-glob='eest_tests/*' \ --basetemp="{temp_dir}/pytest" \ From 011051133789d4e4057503ce983e9cbbf11b1035 Mon Sep 17 00:00:00 2001 From: Mario Vega Date: Mon, 3 Nov 2025 22:59:08 +0000 Subject: [PATCH 12/12] fix(tests): json files cache --- tests/json_infra/conftest.py | 9 ++++---- tests/json_infra/helpers/fixtures.py | 33 ++++++++++++++-------------- 2 files changed, 21 insertions(+), 21 deletions(-) diff --git a/tests/json_infra/conftest.py b/tests/json_infra/conftest.py index c89f4711d9..7545579fa5 100644 --- a/tests/json_infra/conftest.py +++ b/tests/json_infra/conftest.py @@ -309,8 +309,9 @@ def pytest_runtest_teardown(item: Item, nextitem: Item) -> None: same type or does not belong to the same fixtures file. 
""" if isinstance(item, FixtureTestItem): - if not isinstance(nextitem, FixtureTestItem): + if ( + nextitem is None + or not isinstance(nextitem, FixtureTestItem) + or item.fixtures_file != nextitem.fixtures_file + ): item.fixtures_file.clear_data_cache() - else: - if item.fixtures_file != nextitem.fixtures_file: - item.fixtures_file.clear_data_cache() diff --git a/tests/json_infra/helpers/fixtures.py b/tests/json_infra/helpers/fixtures.py index 701dbee2e8..0189b98c39 100644 --- a/tests/json_infra/helpers/fixtures.py +++ b/tests/json_infra/helpers/fixtures.py @@ -75,7 +75,7 @@ def data(self) -> Dict[str, Any]: def clear_data_cache(self) -> None: """Drop the data cache.""" - del self.data + self.__dict__.pop("data", None) def collect( self: Self, @@ -85,22 +85,21 @@ def collect( loaded_file = self.data except Exception: return # Skip *.json files that are unreadable. - if not isinstance(loaded_file, dict): - return - for key, test_dict in loaded_file.items(): - if not isinstance(test_dict, dict): - continue - for fixture_type in ALL_FIXTURE_TYPES: - if not fixture_type.is_format(test_dict): + if isinstance(loaded_file, dict): + for key, test_dict in loaded_file.items(): + if not isinstance(test_dict, dict): continue - name = key - if "::" in name: - name = name.split("::")[1] - yield fixture_type.from_parent( # type: ignore - parent=self, - name=name, - test_file=str(self.path), - test_key=key, - ) + for fixture_type in ALL_FIXTURE_TYPES: + if not fixture_type.is_format(test_dict): + continue + name = key + if "::" in name: + name = name.split("::")[1] + yield fixture_type.from_parent( # type: ignore + parent=self, + name=name, + test_file=str(self.path), + test_key=key, + ) # Make sure we don't keep anything from collection in memory. self.clear_data_cache()