diff --git a/src/ethereum_spec_tools/evm_tools/statetest/__init__.py b/src/ethereum_spec_tools/evm_tools/statetest/__init__.py
index cd58cb3028..8015d43672 100644
--- a/src/ethereum_spec_tools/evm_tools/statetest/__init__.py
+++ b/src/ethereum_spec_tools/evm_tools/statetest/__init__.py
@@ -9,7 +9,7 @@
 from copy import deepcopy
 from dataclasses import dataclass
 from io import StringIO
-from typing import Any, Dict, Iterable, List, Optional, TextIO
+from typing import Any, Dict, Generator, Iterable, List, Optional, TextIO
 
 from ethereum.utils.hexadecimal import hex_to_bytes
 
@@ -35,6 +35,41 @@ class TestCase:
     transaction: Dict
 
 
+def read_test_case(
+    test_file_path: str, key: str, test: Dict[str, Any]
+) -> Generator[TestCase, None, None]:
+    """
+    Given a key and a value, return a `TestCase` object.
+    """
+    env = test["env"]
+    if not isinstance(env, dict):
+        raise TypeError("env not dict")
+
+    pre = test["pre"]
+    if not isinstance(pre, dict):
+        raise TypeError("pre not dict")
+
+    transaction = test["transaction"]
+    if not isinstance(transaction, dict):
+        raise TypeError("transaction not dict")
+
+    for fork_name, content in test["post"].items():
+        for idx, post in enumerate(content):
+            if not isinstance(post, dict):
+                raise TypeError(f'post["{fork_name}"] not dict')
+
+            yield TestCase(
+                path=test_file_path,
+                key=key,
+                index=idx,
+                fork_name=fork_name,
+                post=post,
+                env=env,
+                pre=pre,
+                transaction=transaction,
+            )
+
+
 def read_test_cases(test_file_path: str) -> Iterable[TestCase]:
     """
     Given a path to a filled state test in JSON format, return all the
@@ -44,33 +79,7 @@ def read_test_cases(test_file_path: str) -> Iterable[TestCase]:
         tests = json.load(test_file)
 
     for key, test in tests.items():
-        env = test["env"]
-        if not isinstance(env, dict):
-            raise TypeError("env not dict")
-
-        pre = test["pre"]
-        if not isinstance(pre, dict):
-            raise TypeError("pre not dict")
-
-        transaction = test["transaction"]
-        if not isinstance(transaction, dict):
-            raise TypeError("transaction not dict")
-
-        for fork_name, content in test["post"].items():
-            for idx, post in enumerate(content):
-                if not isinstance(post, dict):
-                    raise TypeError(f'post["{fork_name}"] not dict')
-
-                yield TestCase(
-                    path=test_file_path,
-                    key=key,
-                    index=idx,
-                    fork_name=fork_name,
-                    post=post,
-                    env=env,
-                    pre=pre,
-                    transaction=transaction,
-                )
+        yield from read_test_case(test_file_path, key, test)
 
 
 def run_test_case(
diff --git a/tests/json_infra/conftest.py b/tests/json_infra/conftest.py
index ee19715578..7545579fa5 100644
--- a/tests/json_infra/conftest.py
+++ b/tests/json_infra/conftest.py
@@ -3,8 +3,15 @@
 import os
 import shutil
 import tarfile
+from glob import glob
 from pathlib import Path
-from typing import Callable, Final, Optional, Set
+from typing import (
+    Callable,
+    Final,
+    Optional,
+    Self,
+    Set,
+)
 
 import git
 import requests_cache
@@ -13,12 +20,12 @@
 from _pytest.nodes import Item
 from filelock import FileLock
 from git.exc import GitCommandError, InvalidGitRepositoryError
-from pytest import Session, StashKey, fixture
+from pytest import Collector, Session, StashKey, fixture
 from requests_cache import CachedSession
 from requests_cache.backends.sqlite import SQLiteCache
-from typing_extensions import Self
 
 from . import TEST_FIXTURES
+from .helpers import FixturesFile, FixtureTestItem
 
 try:
     from xdist import get_xdist_worker_id
@@ -260,6 +267,17 @@ def pytest_sessionstart(session: Session) -> None:
                 fixture_path,
             )
 
+            # Remove any python files in the downloaded files to avoid
+            # importing them.
+            for python_file in glob(
+                os.path.join(fixture_path, "**/*.py"), recursive=True
+            ):
+                try:
+                    os.unlink(python_file)
+                except FileNotFoundError:
+                    # Not breaking error, another process deleted it first
+                    pass
+
 
 def pytest_sessionfinish(session: Session, exitstatus: int) -> None:
     """Clean up file locks at session finish."""
@@ -272,3 +290,28 @@ def pytest_sessionfinish(session: Session, exitstatus: int) -> None:
 
     assert lock_file is not None
     lock_file.release()
+
+
+def pytest_collect_file(
+    file_path: Path, parent: Collector
+) -> Collector | None:
+    """
+    Pytest hook that collects test cases from fixture JSON files.
+    """
+    if file_path.suffix == ".json":
+        return FixturesFile.from_parent(parent, path=file_path)
+    return None
+
+
+def pytest_runtest_teardown(item: Item, nextitem: Item) -> None:
+    """
+    Drop cache from a `FixtureTestItem` if the next one is not of the
+    same type or does not belong to the same fixtures file.
+    """
+    if isinstance(item, FixtureTestItem):
+        if (
+            nextitem is None
+            or not isinstance(nextitem, FixtureTestItem)
+            or item.fixtures_file != nextitem.fixtures_file
+        ):
+            item.fixtures_file.clear_data_cache()
diff --git a/tests/json_infra/helpers/__init__.py b/tests/json_infra/helpers/__init__.py
index 3214c2cc14..2980c854e2 100644
--- a/tests/json_infra/helpers/__init__.py
+++ b/tests/json_infra/helpers/__init__.py
@@ -1 +1,10 @@
 """Helpers to load tests from JSON files."""
+
+from .fixtures import ALL_FIXTURE_TYPES, Fixture, FixturesFile, FixtureTestItem
+from .load_blockchain_tests import BlockchainTestFixture
+from .load_state_tests import StateTestFixture
+
+ALL_FIXTURE_TYPES.append(BlockchainTestFixture)
+ALL_FIXTURE_TYPES.append(StateTestFixture)
+
+__all__ = ["ALL_FIXTURE_TYPES", "Fixture", "FixturesFile", "FixtureTestItem"]
diff --git a/tests/json_infra/helpers/fixtures.py b/tests/json_infra/helpers/fixtures.py
new file mode 100644
index 0000000000..0189b98c39
--- /dev/null
+++ b/tests/json_infra/helpers/fixtures.py
@@ -0,0 +1,105 @@
+"""Base class for all fixture loaders."""
+
+import json
+from abc import ABC, abstractmethod
+from functools import cached_property
+from typing import Any, Dict, Generator, List, Self, Type
+
+from _pytest.nodes import Node
+from pytest import Collector, File, Item
+
+
+class FixtureTestItem(Item):
+    """
+    Test item that comes from a fixture file.
+    """
+
+    @property
+    def fixtures_file(self) -> "FixturesFile":
+        """Return the fixtures file from which the test was extracted."""
+        raise NotImplementedError()
+
+
+class Fixture(ABC):
+    """
+    Single fixture from a JSON file.
+
+    It can be subclassed in combination with Item or Collector to create a
+    fixture that can be collected by pytest.
+ """ + + test_file: str + test_key: str + + def __init__( + self, + *args: Any, + test_file: str, + test_key: str, + **kwargs: Any, + ): + super().__init__(*args, **kwargs) + self.test_file = test_file + self.test_key = test_key + + @classmethod + def from_parent( + cls, + parent: Node, + **kwargs: Any, + ) -> Self: + """Pytest hook that returns a fixture from a JSON file.""" + return super().from_parent( # type: ignore[misc] + parent=parent, **kwargs + ) + + @classmethod + @abstractmethod + def is_format(cls, test_dict: Dict[str, Any]) -> bool: + """Return true if the object can be parsed as the fixture type.""" + pass + + +ALL_FIXTURE_TYPES: List[Type[Fixture]] = [] + + +class FixturesFile(File): + """Single JSON file containing fixtures.""" + + @cached_property + def data(self) -> Dict[str, Any]: + """Return the JSON data of the full file.""" + # loaded once per worker per file (thanks to cached_property) + with self.fspath.open("r", encoding="utf-8") as f: + return json.load(f) + + def clear_data_cache(self) -> None: + """Drop the data cache.""" + self.__dict__.pop("data", None) + + def collect( + self: Self, + ) -> Generator[Item | Collector, None, None]: + """Collect test cases from a single JSON fixtures file.""" + try: + loaded_file = self.data + except Exception: + return # Skip *.json files that are unreadable. + if isinstance(loaded_file, dict): + for key, test_dict in loaded_file.items(): + if not isinstance(test_dict, dict): + continue + for fixture_type in ALL_FIXTURE_TYPES: + if not fixture_type.is_format(test_dict): + continue + name = key + if "::" in name: + name = name.split("::")[1] + yield fixture_type.from_parent( # type: ignore + parent=self, + name=name, + test_file=str(self.path), + test_key=key, + ) + # Make sure we don't keep anything from collection in memory. + self.clear_data_cache() diff --git a/tests/json_infra/helpers/load_blockchain_tests.py b/tests/json_infra/helpers/load_blockchain_tests.py index 990e941a35..5ee4695d1e 100644 --- a/tests/json_infra/helpers/load_blockchain_tests.py +++ b/tests/json_infra/helpers/load_blockchain_tests.py @@ -1,14 +1,11 @@ """Helpers to load and run blockchain tests from JSON files.""" import importlib -import json -import os.path -from glob import glob -from typing import Any, Dict, Generator +from pathlib import Path +from typing import Any, Dict, Tuple from unittest.mock import call, patch import pytest -from _pytest.mark.structures import ParameterSet from ethereum_rlp import rlp from ethereum_rlp.exceptions import RLPException from ethereum_types.numeric import U64 @@ -20,6 +17,7 @@ from .. 
import FORKS from .exceptional_test_patterns import exceptional_blockchain_test_patterns +from .fixtures import Fixture, FixturesFile, FixtureTestItem class NoTestsFoundError(Exception): @@ -29,79 +27,6 @@ class NoTestsFoundError(Exception): """ -def run_blockchain_st_test(test_case: Dict, load: Load) -> None: - """Run a blockchain state test from JSON test case data.""" - test_file = test_case["test_file"] - test_key = test_case["test_key"] - - with open(test_file, "r") as fp: - data = json.load(fp) - - json_data = data[test_key] - - if "postState" not in json_data: - pytest.xfail(f"{test_case} doesn't have post state") - - genesis_header = load.json_to_header(json_data["genesisBlockHeader"]) - parameters = [ - genesis_header, - (), - (), - ] - if hasattr(genesis_header, "withdrawals_root"): - parameters.append(()) - - if hasattr(genesis_header, "requests_root"): - parameters.append(()) - - genesis_block = load.fork.Block(*parameters) - - genesis_header_hash = hex_to_bytes(json_data["genesisBlockHeader"]["hash"]) - assert keccak256(rlp.encode(genesis_header)) == genesis_header_hash - genesis_rlp = hex_to_bytes(json_data["genesisRLP"]) - assert rlp.encode(genesis_block) == genesis_rlp - - try: - state = load.json_to_state(json_data["pre"]) - except StateWithEmptyAccount as e: - pytest.xfail(str(e)) - - chain = load.fork.BlockChain( - blocks=[genesis_block], - state=state, - chain_id=U64(json_data["genesisBlockHeader"].get("chainId", 1)), - ) - - mock_pow = ( - json_data["sealEngine"] == "NoProof" and not load.fork.proof_of_stake - ) - - for json_block in json_data["blocks"]: - block_exception = None - for key, value in json_block.items(): - if key.startswith("expectException"): - block_exception = value - break - - if block_exception: - # TODO: Once all the specific exception types are thrown, - # only `pytest.raises` the correct exception type instead of - # all of them. - with pytest.raises((EthereumException, RLPException)): - add_block_to_chain(chain, json_block, load, mock_pow) - return - else: - add_block_to_chain(chain, json_block, load, mock_pow) - - last_block_hash = hex_to_bytes(json_data["lastblockhash"]) - assert keccak256(rlp.encode(chain.blocks[-1].header)) == last_block_hash - - expected_post_state = load.json_to_state(json_data["postState"]) - assert chain.state == expected_post_state - load.fork.close_state(chain.state) - load.fork.close_state(expected_post_state) - - def add_block_to_chain( chain: Any, json_block: Any, load: Load, mock_pow: bool ) -> None: @@ -133,96 +58,150 @@ def add_block_to_chain( ) -# Functions that fetch individual test cases -def load_json_fixture(test_file: str, json_fork: str) -> Generator: - """Load test cases from a JSON fixture file for the specified fork.""" - # Extract the pure basename of the file without the path to the file. - # Ex: Extract "world.json" from "path/to/file/world.json" - # Extract the filename without the extension. 
Ex: Extract "world" from - # "world.json" - with open(test_file, "r") as fp: - data = json.load(fp) - - # Search tests by looking at the `network` attribute - found_keys = [] - for key, test in data.items(): - if "network" not in test: - continue - - if test["network"] == json_fork: - found_keys.append(key) - - if not any(found_keys): - raise NoTestsFoundError - - for _key in found_keys: - yield { - "test_file": test_file, - "test_key": _key, - "json_fork": json_fork, - } - - -def fetch_blockchain_tests( - json_fork: str, -) -> Generator[Dict | ParameterSet, None, None]: - """Fetch all blockchain test cases for the specified JSON fork.""" - # Filter FORKS based on fork_option parameter - eels_fork = FORKS[json_fork]["eels_fork"] - test_dirs = FORKS[json_fork]["blockchain_test_dirs"] - - test_patterns = exceptional_blockchain_test_patterns(json_fork, eels_fork) - - # Get all the files to iterate over from both eest_tests_path - # and ethereum_tests_path - all_jsons = [] - for test_dir in test_dirs: - all_jsons.extend( - glob(os.path.join(test_dir, "**/*.json"), recursive=True) +class BlockchainTestFixture(Fixture, FixtureTestItem): + """Single blockchain test fixture from a JSON file.""" + + fork_name: str + + def __init__( + self, + *args: Any, + **kwargs: Any, + ) -> None: + """Initialize a single blockchain test fixture from a JSON file.""" + super().__init__(*args, **kwargs) + self.fork_name = self.test_dict["network"] + self.add_marker(pytest.mark.fork(self.fork_name)) + self.add_marker("evm_tools") + self.add_marker("json_blockchain_tests") + eels_fork = FORKS[self.fork_name]["eels_fork"] + test_patterns = exceptional_blockchain_test_patterns( + self.fork_name, eels_fork ) + assert self.test_file is not None + assert self.test_key is not None + _identifier = "(" + self.test_file + "|" + self.test_key + ")" + if any( + x.search(self.test_file) for x in test_patterns.expected_fail + ) or any(x.search(_identifier) for x in test_patterns.expected_fail): + self.add_marker(pytest.mark.skip("Expected to fail")) + if any(x.search(_identifier) for x in test_patterns.slow): + self.add_marker("slow") + if any(x.search(_identifier) for x in test_patterns.big_memory): + self.add_marker("bigmem") + + @property + def fixtures_file(self) -> FixturesFile: + """Fixtures file from which the test fixture was collected.""" + parent = self.parent + assert parent is not None + assert isinstance(parent, FixturesFile) + return parent + + @property + def test_dict(self) -> Dict[str, Any]: + """Load test from disk.""" + loaded_file = self.fixtures_file.data + return loaded_file[self.test_key] + + def runtest(self) -> None: + """Run a blockchain state test from JSON test case data.""" + json_data = self.test_dict + if "postState" not in json_data: + pytest.xfail( + f"{self.test_file}[{self.test_key}] doesn't have post state" + ) + + eels_fork = FORKS[self.fork_name]["eels_fork"] + load = Load( + self.fork_name, + eels_fork, + ) + + genesis_header = load.json_to_header(json_data["genesisBlockHeader"]) + parameters = [ + genesis_header, + (), + (), + ] + if hasattr(genesis_header, "withdrawals_root"): + parameters.append(()) + + if hasattr(genesis_header, "requests_root"): + parameters.append(()) - files_to_iterate = [] - for full_path in all_jsons: - if not any(x.search(full_path) for x in test_patterns.expected_fail): - # If a file or folder is marked for ignore, - # it can already be dropped at this stage - files_to_iterate.append(full_path) + genesis_block = load.fork.Block(*parameters) + + genesis_header_hash 
= hex_to_bytes( + json_data["genesisBlockHeader"]["hash"] + ) + assert keccak256(rlp.encode(genesis_header)) == genesis_header_hash + genesis_rlp = hex_to_bytes(json_data["genesisRLP"]) + assert rlp.encode(genesis_block) == genesis_rlp - # Start yielding individual test cases from the file list - for _test_file in files_to_iterate: try: - for _test_case in load_json_fixture(_test_file, json_fork): - # _identifier could identify files, folders through test_file - # individual cases through test_key - _identifier = ( - "(" - + _test_case["test_file"] - + "|" - + _test_case["test_key"] - + ")" - ) - _test_case["eels_fork"] = eels_fork - if any( - x.search(_identifier) for x in test_patterns.expected_fail - ): - continue - elif any(x.search(_identifier) for x in test_patterns.slow): - yield pytest.param(_test_case, marks=pytest.mark.slow) - elif any( - x.search(_identifier) for x in test_patterns.big_memory - ): - yield pytest.param(_test_case, marks=pytest.mark.bigmem) - else: - yield _test_case - except NoTestsFoundError: - # file doesn't contain tests for the given fork - continue - - -# Test case Identifier -def idfn(test_case: Dict) -> str: - """Generate test case identifier from test case dictionary.""" - if isinstance(test_case, dict): - folder_name = test_case["test_file"].split("/")[-2] - # Assign Folder name and test_key to identify tests in output - return folder_name + " - " + test_case["test_key"] + state = load.json_to_state(json_data["pre"]) + except StateWithEmptyAccount as e: + pytest.xfail(str(e)) + + chain = load.fork.BlockChain( + blocks=[genesis_block], + state=state, + chain_id=U64(json_data["genesisBlockHeader"].get("chainId", 1)), + ) + + mock_pow = ( + json_data["sealEngine"] == "NoProof" + and not load.fork.proof_of_stake + ) + + for json_block in json_data["blocks"]: + block_exception = None + for key, value in json_block.items(): + if key.startswith("expectException"): + block_exception = value + break + if key == "exceptions": + block_exception = value + break + + if block_exception: + # TODO: Once all the specific exception types are thrown, + # only `pytest.raises` the correct exception type instead + # of all of them. 
+                with pytest.raises((EthereumException, RLPException)):
+                    add_block_to_chain(chain, json_block, load, mock_pow)
+                return
+            else:
+                add_block_to_chain(chain, json_block, load, mock_pow)
+
+        last_block_hash = hex_to_bytes(json_data["lastblockhash"])
+        assert (
+            keccak256(rlp.encode(chain.blocks[-1].header)) == last_block_hash
+        )
+
+        expected_post_state = load.json_to_state(json_data["postState"])
+        assert chain.state == expected_post_state
+        load.fork.close_state(chain.state)
+        load.fork.close_state(expected_post_state)
+
+    def reportinfo(self) -> Tuple[Path, int, str]:
+        """Return information for test reporting."""
+        return self.path, 1, self.name
+
+    @classmethod
+    def is_format(cls, test_dict: Dict[str, Any]) -> bool:
+        """Return true if the object can be parsed as the fixture type."""
+        if "genesisBlockHeader" not in test_dict:
+            return False
+        if "blocks" not in test_dict:
+            return False
+        if "engineNewPayloads" in test_dict:
+            return False
+        if "preHash" in test_dict:
+            return False
+        if "network" not in test_dict:
+            return False
+        if test_dict["network"] not in FORKS:
+            return False
+        return True
diff --git a/tests/json_infra/helpers/load_state_tests.py b/tests/json_infra/helpers/load_state_tests.py
index 37e6813402..dece8307eb 100644
--- a/tests/json_infra/helpers/load_state_tests.py
+++ b/tests/json_infra/helpers/load_state_tests.py
@@ -1,148 +1,199 @@
 """Helper functions to load and run general state tests for Ethereum forks."""
 
 import json
-import os
 import sys
-from glob import glob
 from io import StringIO
-from typing import Dict, Generator
+from typing import Any, Dict, Iterable
 
 import pytest
+from _pytest.nodes import Item
+from pytest import Collector
 
 from ethereum.exceptions import StateWithEmptyAccount
 from ethereum.utils.hexadecimal import hex_to_bytes
 from ethereum_spec_tools.evm_tools import create_parser
-from ethereum_spec_tools.evm_tools.statetest import read_test_cases
+from ethereum_spec_tools.evm_tools.statetest import read_test_case
 from ethereum_spec_tools.evm_tools.t8n import T8N
 
 from .. import FORKS
-from .exceptional_test_patterns import exceptional_state_test_patterns
+from .exceptional_test_patterns import (
+    exceptional_state_test_patterns,
+)
+from .fixtures import Fixture, FixturesFile, FixtureTestItem
 
 parser = create_parser()
 
 
-def fetch_state_tests(json_fork: str) -> Generator:
-    """
-    Fetches all the general state tests from the given directory.
- """ - # Filter FORKS based on fork_option parameter - eels_fork = FORKS[json_fork]["eels_fork"] - test_dirs = FORKS[json_fork]["state_test_dirs"] - - test_patterns = exceptional_state_test_patterns(json_fork, eels_fork) - - # Get all the files to iterate over from both eest_tests_path - # and ethereum_tests_path - all_jsons = [] - for test_dir in test_dirs: - all_jsons.extend( - glob(os.path.join(test_dir, "**/*.json"), recursive=True) +class StateTest(FixtureTestItem): + """Single state test case item.""" + + index: int + fork_name: str + + def __init__( + self, + *args: Any, + index: int, + fork_name: str, + key: str, + **kwargs: Any, + ) -> None: + """Initialize a single test case item.""" + super().__init__(*args, **kwargs) + self.index = index + self.fork_name = fork_name + self.add_marker(pytest.mark.fork(self.fork_name)) + self.add_marker("evm_tools") + self.add_marker("json_state_tests") + eels_fork = FORKS[fork_name]["eels_fork"] + test_patterns = exceptional_state_test_patterns(fork_name, eels_fork) + if any(x.search(key) for x in test_patterns.slow): + self.add_marker("slow") + + @property + def state_test_fixture(self) -> "StateTestFixture": + """Return the state test fixture this test belongs to.""" + parent = self.parent + assert parent is not None + assert isinstance(parent, StateTestFixture) + return parent + + @property + def test_key(self) -> str: + """Return the key of the state test fixture in the fixture file.""" + return self.state_test_fixture.test_key + + @property + def fixtures_file(self) -> FixturesFile: + """Fixtures file from which the test fixture was collected.""" + return self.state_test_fixture.fixtures_file + + @property + def test_dict(self) -> Dict[str, Any]: + """Load test from disk.""" + loaded_file = self.fixtures_file.data + return loaded_file[self.test_key] + + def runtest(self) -> None: + """ + Runs a single general state test. 
+ """ + json_fork = self.fork_name + test_dict = self.test_dict + + env = test_dict["env"] + try: + env["blockHashes"] = {"0": env["previousHash"]} + except KeyError: + env["blockHashes"] = {} + env["withdrawals"] = [] + + alloc = test_dict["pre"] + + post = test_dict["post"][self.fork_name][self.index] + post_hash = post["hash"] + d = post["indexes"]["data"] + g = post["indexes"]["gas"] + v = post["indexes"]["value"] + + tx = {} + for k, value in test_dict["transaction"].items(): + if k == "data": + tx["input"] = value[d] + elif k == "gasLimit": + tx["gas"] = value[g] + elif k == "value": + tx[k] = value[v] + elif k == "accessLists": + if value[d] is not None: + tx["accessList"] = value[d] + else: + tx[k] = value + + txs = [tx] + + in_stream = StringIO( + json.dumps( + { + "env": env, + "alloc": alloc, + "txs": txs, + } + ) ) - for test_file_path in all_jsons: - test_cases = read_test_cases(test_file_path) + # Run the t8n tool + t8n_args = [ + "t8n", + "--input.alloc", + "stdin", + "--input.env", + "stdin", + "--input.txs", + "stdin", + "--state.fork", + f"{json_fork}", + "--state-test", + ] + t8n_options = parser.parse_args(t8n_args) - for test_case in test_cases: - if test_case.fork_name != json_fork: - continue + try: + t8n = T8N(t8n_options, sys.stdout, in_stream) + except StateWithEmptyAccount as e: + pytest.xfail(str(e)) - test_case_dict = { - "test_file": test_case.path, - "test_key": test_case.key, - "index": test_case.index, - "json_fork": json_fork, - } + t8n.run_state_test() - if any(x.search(test_case.key) for x in test_patterns.slow): - yield pytest.param(test_case_dict, marks=pytest.mark.slow) - else: - yield test_case_dict + assert hex_to_bytes(post_hash) == t8n.result.state_root -def idfn(test_case: Dict) -> str: +class StateTestFixture(Fixture, Collector): """ - Identify the test case. + State test fixture from a JSON file that can contain multiple test + cases. """ - if isinstance(test_case, dict): - folder_name = test_case["test_file"].split("/")[-2] - test_key = test_case["test_key"] - index = test_case["index"] - - return f"{folder_name} - {test_key} - {index}" - -def run_state_test(test_case: Dict[str, str]) -> None: - """ - Runs a single general state test. 
- """ - test_file = test_case["test_file"] - test_key = test_case["test_key"] - index = test_case["index"] - json_fork = test_case["json_fork"] - with open(test_file) as f: - tests = json.load(f) - - env = tests[test_key]["env"] - try: - env["blockHashes"] = {"0": env["previousHash"]} - except KeyError: - env["blockHashes"] = {} - env["withdrawals"] = [] - - alloc = tests[test_key]["pre"] - - post = tests[test_key]["post"][json_fork][index] - post_hash = post["hash"] - d = post["indexes"]["data"] - g = post["indexes"]["gas"] - v = post["indexes"]["value"] - - tx = {} - for k, value in tests[test_key]["transaction"].items(): - if k == "data": - tx["input"] = value[d] - elif k == "gasLimit": - tx["gas"] = value[g] - elif k == "value": - tx[k] = value[v] - elif k == "accessLists": - if value[d] is not None: - tx["accessList"] = value[d] - else: - tx[k] = value - - txs = [tx] - - in_stream = StringIO( - json.dumps( - { - "env": env, - "alloc": alloc, - "txs": txs, - } - ) - ) - - # Run the t8n tool - t8n_args = [ - "t8n", - "--input.alloc", - "stdin", - "--input.env", - "stdin", - "--input.txs", - "stdin", - "--state.fork", - f"{json_fork}", - "--state-test", - ] - t8n_options = parser.parse_args(t8n_args) - - try: - t8n = T8N(t8n_options, sys.stdout, in_stream) - except StateWithEmptyAccount as e: - pytest.xfail(str(e)) - - t8n.run_state_test() - - assert hex_to_bytes(post_hash) == t8n.result.state_root + @classmethod + def is_format(cls, test_dict: Dict[str, Any]) -> bool: + """Return true if the object can be parsed as the fixture type.""" + if "env" not in test_dict: + return False + if "pre" not in test_dict: + return False + if "transaction" not in test_dict: + return False + if "post" not in test_dict: + return False + return True + + @property + def fixtures_file(self) -> FixturesFile: + """Fixtures file from which the test fixture was collected.""" + parent = self.parent + assert parent is not None + assert isinstance(parent, FixturesFile) + return parent + + @property + def test_dict(self) -> Dict[str, Any]: + """Load test from disk.""" + loaded_file = self.fixtures_file.data + return loaded_file[self.test_key] + + def collect(self) -> Iterable[Item | Collector]: + """Collect state test cases inside of this fixture.""" + for test_case in read_test_case( + test_file_path=self.test_file, + key=self.test_key, + test=self.test_dict, + ): + if test_case.fork_name not in FORKS: + continue + name = f"{test_case.index}" + yield StateTest.from_parent( + parent=self, + name=name, + index=test_case.index, + fork_name=test_case.fork_name, + key=self.test_key, + ) diff --git a/tests/json_infra/test_blockchain_tests.py b/tests/json_infra/test_blockchain_tests.py deleted file mode 100644 index 9e19a361cf..0000000000 --- a/tests/json_infra/test_blockchain_tests.py +++ /dev/null @@ -1,40 +0,0 @@ -"""Run the blockchain tests from json fixtures.""" - -from typing import Callable, Dict - -import pytest - -from . 
import FORKS -from .helpers.load_blockchain_tests import ( - Load, - fetch_blockchain_tests, - idfn, - run_blockchain_st_test, -) - - -def _generate_test_function(fork_name: str) -> Callable: - """Generates a test function for blockchain tests for a specific fork.""" - - @pytest.mark.fork(fork_name) - @pytest.mark.json_blockchain_tests - @pytest.mark.parametrize( - "blockchain_test_case", - fetch_blockchain_tests(fork_name), - ids=idfn, - ) - def test_func(blockchain_test_case: Dict) -> None: - load = Load( - blockchain_test_case["json_fork"], - blockchain_test_case["eels_fork"], - ) - run_blockchain_st_test(blockchain_test_case, load=load) - - test_func.__name__ = f"test_blockchain_tests_{fork_name.lower()}" - return test_func - - -for fork_name in FORKS.keys(): - locals()[f"test_blockchain_tests_{fork_name.lower()}"] = ( - _generate_test_function(fork_name) - ) diff --git a/tests/json_infra/test_state_tests.py b/tests/json_infra/test_state_tests.py deleted file mode 100644 index 20bb578654..0000000000 --- a/tests/json_infra/test_state_tests.py +++ /dev/null @@ -1,32 +0,0 @@ -"""Run the state tests from json fixtures.""" - -from typing import Callable, Dict - -import pytest - -from . import FORKS -from .helpers.load_state_tests import fetch_state_tests, idfn, run_state_test - - -def _generate_test_function(fork_name: str) -> Callable: - """Generates a test function for state tests for a specific fork.""" - - @pytest.mark.fork(fork_name) - @pytest.mark.evm_tools - @pytest.mark.json_state_tests - @pytest.mark.parametrize( - "state_test_case", - fetch_state_tests(fork_name), - ids=idfn, - ) - def test_func(state_test_case: Dict) -> None: - run_state_test(state_test_case) - - test_func.__name__ = f"test_state_tests_{fork_name.lower()}" - return test_func - - -for fork_name in FORKS.keys(): - locals()[f"test_state_tests_{fork_name.lower()}"] = ( - _generate_test_function(fork_name) - ) diff --git a/tox.ini b/tox.ini index e52433e286..14e3019322 100644 --- a/tox.ini +++ b/tox.ini @@ -48,14 +48,13 @@ commands = commands = pytest \ -m "not slow" \ - -n auto --maxprocesses 6 \ + -n auto --maxprocesses 6 --dist=loadfile \ --cov-config=pyproject.toml \ --cov=ethereum \ --cov-report=term \ --cov-report "xml:{toxworkdir}/coverage.xml" \ --no-cov-on-fail \ --cov-branch \ - --ignore-glob='tests/json_infra/fixtures/*' \ --basetemp="{temp_dir}/pytest" \ tests/json_infra @@ -99,8 +98,7 @@ passenv = commands = pytest \ -m "not slow and not evm_tools" \ - -n auto --maxprocesses 5 \ - --ignore-glob='tests/json_infra/fixtures/*' \ + -n auto --maxprocesses 5 --dist=loadfile \ --ignore-glob='tests/test_t8n.py' \ --ignore-glob='eest_tests/*' \ --basetemp="{temp_dir}/pytest" \