Skip to content

Commit

Permalink
Setup workflow test framework.
Browse files Browse the repository at this point in the history
  • Loading branch information
jmchilton committed Jul 29, 2024
1 parent fa9ca5c commit f6fcc95
Show file tree
Hide file tree
Showing 10 changed files with 333 additions and 46 deletions.
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
name: Framework tests
name: Tool framework tests
on:
push:
paths-ignore:
Expand Down Expand Up @@ -64,9 +64,9 @@ jobs:
uses: actions/cache@v4
with:
path: 'galaxy root/.venv'
key: gxy-venv-${{ runner.os }}-${{ steps.full-python-version.outputs.version }}-${{ hashFiles('galaxy root/requirements.txt') }}-framework
key: gxy-venv-${{ runner.os }}-${{ steps.full-python-version.outputs.version }}-${{ hashFiles('galaxy root/requirements.txt') }}-framework-tools
- name: Run tests
run: ./run_tests.sh --coverage --framework
run: ./run_tests.sh --coverage --framework-tools
working-directory: 'galaxy root'
- uses: codecov/codecov-action@v3
with:
Expand Down
79 changes: 79 additions & 0 deletions .github/workflows/framework_workflows.yaml
Original file line number Diff line number Diff line change
@@ -0,0 +1,79 @@
# CI workflow for Galaxy's workflow framework tests (lib/galaxy_test/workflow),
# the workflow-based counterpart of the tool framework test workflow.
name: Workflow framework tests
on:
  push:
    paths-ignore:
      - 'client/**'
      - 'doc/**'
      - 'lib/galaxy_test/selenium/**'
  pull_request:
    paths-ignore:
      - 'client/**'
      - 'doc/**'
      - 'lib/galaxy_test/selenium/**'
  schedule:
    # Run at midnight UTC every Tuesday
    - cron: '0 0 * * 2'
env:
  # Run tests against a real PostgreSQL database (service container below).
  GALAXY_TEST_DBURI: 'postgresql://postgres:postgres@localhost:5432/galaxy?client_encoding=utf8'
  GALAXY_TEST_RAISE_EXCEPTION_ON_HISTORYLESS_HDA: '1'
concurrency:
  # Cancel superseded in-flight runs for the same branch/PR.
  group: ${{ github.workflow }}-${{ github.ref }}
  cancel-in-progress: true
jobs:
  test:
    name: Test
    runs-on: ubuntu-latest
    strategy:
      matrix:
        python-version: ['3.8']
    services:
      postgres:
        image: postgres:13
        env:
          POSTGRES_USER: postgres
          POSTGRES_PASSWORD: postgres
          POSTGRES_DB: postgres
        ports:
          - 5432:5432
    steps:
      # Scheduled runs additionally exercise the extended metadata strategy and
      # outputs_to_working_directory configuration.
      - if: github.event_name == 'schedule'
        run: |
          echo "GALAXY_CONFIG_OVERRIDE_METADATA_STRATEGY=extended" >> $GITHUB_ENV
          echo "GALAXY_CONFIG_OVERRIDE_OUTPUTS_TO_WORKING_DIRECTORY=true" >> $GITHUB_ENV
      - uses: actions/checkout@v4
        with:
          path: 'galaxy root'
      - uses: actions/setup-node@v4
        with:
          node-version: '18.12.1'
          cache: 'yarn'
          cache-dependency-path: 'galaxy root/client/yarn.lock'
      - uses: actions/setup-python@v5
        with:
          python-version: ${{ matrix.python-version }}
      # Full "major-minor-micro-level-serial" version string, used in cache keys below.
      - name: Get full Python version
        id: full-python-version
        shell: bash
        run: echo "version=$(python -c 'import sys; print("-".join(str(v) for v in sys.version_info))')" >> $GITHUB_OUTPUT
      - name: Cache pip dir
        uses: actions/cache@v4
        with:
          path: ~/.cache/pip
          key: pip-cache-${{ matrix.python-version }}-${{ hashFiles('galaxy root/requirements.txt') }}
      - name: Cache galaxy venv
        uses: actions/cache@v4
        with:
          path: 'galaxy root/.venv'
          key: gxy-venv-${{ runner.os }}-${{ steps.full-python-version.outputs.version }}-${{ hashFiles('galaxy root/requirements.txt') }}-framework-workflows
      - name: Run tests
        run: ./run_tests.sh --coverage --framework-workflows
        working-directory: 'galaxy root'
      - uses: codecov/codecov-action@v3
        with:
          # NOTE(review): flag matches the tool framework workflow's 'framework' —
          # confirm a distinct flag (e.g. framework-workflows) isn't intended.
          flags: framework
          working-directory: 'galaxy root'
      - uses: actions/upload-artifact@v4
        if: failure()
        with:
          name: Framework test results (${{ matrix.python-version }})
          path: 'galaxy root/run_framework_workflows_tests.html'
29 changes: 23 additions & 6 deletions lib/galaxy/tool_util/verify/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -45,9 +45,9 @@
DEFAULT_METRIC,
DEFAULT_PIN_LABELS,
)
from galaxy.tool_util.parser.yaml import to_test_assert_list
from galaxy.util import unicodify
from galaxy.util.compression_utils import get_fileobj
from galaxy.tool_util.parser.yaml import to_test_assert_list
from .asserts import verify_assertions
from .test_data import TestDataResolver

Expand All @@ -58,7 +58,7 @@

DEFAULT_TEST_DATA_RESOLVER = TestDataResolver()
GetFilenameT = Optional[Callable[[str], str]]
GetLocationT = Optional[Callable[[str], bytes]]
GetLocationT = Optional[Callable[[str], str]]


def verify(
Expand Down Expand Up @@ -593,14 +593,30 @@ def files_image_diff(file1: str, file2: str, attributes: Optional[Dict[str, Any]
# TODO: After tool-util with this included is published, refactor planemo.test._check_output
# to use this function. There is already a comment there about breaking fewer abstractions.
# https://github.com/galaxyproject/planemo/blob/master/planemo/test/_check_output.py
def verify_file_path_against_dict(get_filename: GetFilenameT, get_location: GetLocationT, path: str, output_content: bytes, test_properties, test_data_target_dir: Optional[str] = None) -> None:
def verify_file_path_against_dict(
    get_filename: GetFilenameT,
    get_location: GetLocationT,
    path: str,
    output_content: bytes,
    test_properties,
    test_data_target_dir: Optional[str] = None,
) -> None:
    """Verify the file at ``path`` against the expectations in ``test_properties``.

    ``output_content`` is accepted for signature compatibility but is ignored:
    the content is always (re-)read from ``path``. (The original implementation
    shadowed the parameter with the file read, hiding this; a local is used here
    so the behavior is explicit.)
    """
    with open(path, "rb") as f:
        content = f.read()
    item_label = f"Output with path {path}"
    verify_file_contents_against_dict(
        get_filename, get_location, item_label, content, test_properties, test_data_target_dir
    )


def verify_file_contents_against_dict(get_filename: GetFilenameT, get_location: GetLocationT, item_label: str, output_content: bytes, test_properties, test_data_target_dir: Optional[str] = None) -> None:
def verify_file_contents_against_dict(
get_filename: GetFilenameT,
get_location: GetLocationT,
item_label: str,
output_content: bytes,
test_properties,
test_data_target_dir: Optional[str] = None,
) -> None:
# Support Galaxy-like file location (using "file") or CWL-like ("path" or "location").
expected_file = test_properties.get("file", None)
if expected_file is None:
Expand All @@ -609,6 +625,7 @@ def verify_file_contents_against_dict(get_filename: GetFilenameT, get_location:
location = test_properties.get("location")
if location:
if location.startswith(("http://", "https://")):
assert get_location
expected_file = get_location(location)
else:
expected_file = location.split("file://", 1)[-1]
Expand Down
79 changes: 44 additions & 35 deletions lib/galaxy/tool_util/verify/interactor.py
Original file line number Diff line number Diff line change
Expand Up @@ -346,18 +346,7 @@ def _verify_metadata(self, history_id, hid, attributes):
`dbkey` and `tags` all map to the API description directly. Other metadata attributes
are assumed to be datatype-specific and mapped with a prefix of `metadata_`.
"""
metadata = attributes.get("metadata", {}).copy()
for key in metadata.copy().keys():
if key not in ["name", "info", "tags", "created_from_basename"]:
new_key = f"metadata_{key}"
metadata[new_key] = metadata[key]
del metadata[key]
elif key == "info":
metadata["misc_info"] = metadata["info"]
del metadata["info"]
expected_file_type = attributes.get("ftype", None)
if expected_file_type:
metadata["file_ext"] = expected_file_type
metadata = get_metadata_to_test(attributes)

if metadata:

Expand All @@ -370,29 +359,7 @@ def wait_for_content():
return None

dataset = wait_on(wait_for_content, desc="dataset metadata", timeout=10)

for key, value in metadata.items():
try:
dataset_value = dataset.get(key, None)

def compare(val, expected):
if str(val) != str(expected):
raise Exception(
f"Dataset metadata verification for [{key}] failed, expected [{value}] but found [{dataset_value}]. Dataset API value was [{dataset}]." # noqa: B023
)

if isinstance(dataset_value, list):
value = str(value).split(",")
if len(value) != len(dataset_value):
raise Exception(
f"Dataset metadata verification for [{key}] failed, expected [{value}] but found [{dataset_value}], lists differ in length. Dataset API value was [{dataset}]."
)
for val, expected in zip(dataset_value, value):
compare(val, expected)
else:
compare(dataset_value, value)
except KeyError:
raise Exception(f"Failed to verify dataset metadata, metadata key [{key}] was not found.")
compare_expected_metadata_to_api_response(metadata, dataset)

def wait_for_job(self, job_id: str, history_id: Optional[str] = None, maxseconds=DEFAULT_TOOL_TEST_WAIT) -> None:
self.wait_for(lambda: self.__job_ready(job_id, history_id), maxseconds=maxseconds)
Expand Down Expand Up @@ -1934,3 +1901,45 @@ def test_data_iter(required_files):
raise Exception(f"edit_attributes type ({edit_att.get('type', None)}) is unimplemented")

yield data_dict


def compare_expected_metadata_to_api_response(metadata: dict, dataset: dict) -> None:
    """Compare expected test metadata against a dataset dictionary from the API.

    Values are compared as strings so that e.g. numeric metadata matches its
    serialized form. When the API value for a key is a list, the expected value
    is treated as a comma-separated string and compared element-wise.

    :param metadata: expected metadata keyed as the API produces
        (e.g. the output of ``get_metadata_to_test``).
    :param dataset: dataset dictionary as returned by the dataset API.
    :raises Exception: describing the first metadata entry that does not match.
    """
    # Rewritten without the per-iteration closure the original needed a
    # ``# noqa: B023`` for, and without the unreachable ``except KeyError``
    # (``dict.get`` never raises KeyError).
    for key, expected_value in metadata.items():
        actual_value = dataset.get(key, None)
        if isinstance(actual_value, list):
            # Expected list metadata is declared as a comma-separated string.
            expected_list = str(expected_value).split(",")
            if len(expected_list) != len(actual_value):
                raise Exception(
                    f"Dataset metadata verification for [{key}] failed, expected [{expected_list}] but found [{actual_value}], lists differ in length. Dataset API value was [{dataset}]."
                )
            pairs = list(zip(actual_value, expected_list))
            # On element mismatch, report the full lists — matching the message
            # the original produced via its late-bound closure variables.
            reported_expected = expected_list
        else:
            pairs = [(actual_value, expected_value)]
            reported_expected = expected_value
        for actual, expected in pairs:
            if str(actual) != str(expected):
                raise Exception(
                    f"Dataset metadata verification for [{key}] failed, expected [{reported_expected}] but found [{actual_value}]. Dataset API value was [{dataset}]."
                )


def get_metadata_to_test(test_properties: dict) -> dict:
    """Fetch out metadata to test from test property dict and adapt it to keys the API produces."""
    api_metadata = {}
    for key, value in test_properties.get("metadata", {}).items():
        if key == "info":
            # The API exposes the "info" attribute as "misc_info".
            api_metadata["misc_info"] = value
        elif key in ("name", "tags", "created_from_basename"):
            # These attributes map to the API description directly.
            api_metadata[key] = value
        else:
            # Everything else is assumed datatype-specific metadata, exposed
            # by the API with a "metadata_" prefix.
            api_metadata[f"metadata_{key}"] = value
    if test_properties.get("ftype", None):
        api_metadata["file_ext"] = test_properties["ftype"]
    return api_metadata
3 changes: 2 additions & 1 deletion lib/galaxy_test/driver/driver_util.py
Original file line number Diff line number Diff line change
Expand Up @@ -850,7 +850,8 @@ def _configure(self, config_object=None) -> None:

default_tool_conf: Optional[str]
datatypes_conf_override: Optional[str]
if getattr(config_object, "framework_tool_and_types", False):
framework_tools_and_types = getattr(config_object, "framework_tool_and_types", False)
if framework_tools_and_types:
default_tool_conf = FRAMEWORK_SAMPLE_TOOLS_CONF
datatypes_conf_override = FRAMEWORK_DATATYPES_CONF
else:
Expand Down
7 changes: 7 additions & 0 deletions lib/galaxy_test/workflow/__init__.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,7 @@
"""Framework tests for checking workflow correctness and defining expected workflow behavior.
This is meant to grow into the workflow based mirror of what the framework tests are for tools.
api/test_workflows.py is still the place to test exceptional conditions, errors, etc... but tests
of normal operation where semantics can be verified with simple inputs and outputs can now be
places in here.
"""
22 changes: 22 additions & 0 deletions lib/galaxy_test/workflow/conftest.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,22 @@
import os

import pytest

from galaxy_test.driver.driver_util import GalaxyTestDriver


class ConfigObject:
    """Driver configuration selecting the framework sample tool/datatype confs."""

    # Read via getattr(config_object, "framework_tool_and_types", False) by the
    # test driver's _configure to pick the framework sample tool and datatype
    # configuration files.
    framework_tool_and_types = True


@pytest.fixture(scope="session")
def real_driver():
if not os.environ.get("GALAXY_TEST_ENVIRONMENT_CONFIGURED"):
driver = GalaxyTestDriver()
driver.setup(ConfigObject)
try:
yield driver
finally:
driver.tear_down()
else:
yield None
Loading

0 comments on commit f6fcc95

Please sign in to comment.