
Commit 2986b28

Release 1.0.0-rc3 (#760)
* deploy: e5851e0
* deploy: fa0d20f

Co-authored-by: DriesSchaumont <[email protected]>
1 parent 44c832c commit 2986b28

635 files changed: +11038 additions, -4123 deletions


.github/workflows/main-build.yml

Lines changed: 21 additions & 10 deletions

@@ -1,6 +1,6 @@
 name: Build
 concurrency:
-  group: ${{ github.workflow }}-${{ github.event.inputs.deploy_branch && format('{0}_build', github.ref_name) || github.event.inputs.deploy_branch }}
+  group: ${{ github.workflow }}-${{ inputs.deploy_branch && inputs.deploy_branch || format('{0}_build', github.ref_name) }}
   cancel-in-progress: true

 on:
@@ -20,11 +20,10 @@ on:
       target_tag:
         type: string
         required: false
-        default: main_build
         description: |
-          Version tag of containers to use. Is `main_build` by default.
-          Can be used in combination with 'push_containers' to re-use existing docker images
-          or set the tag for new builds.
+          Version tag of containers to use. Defaults to name of the branch that triggered the workflow,
+          suffixed by "_build". Can be used in combination with 'push_containers' (by unchecking it)
+          to re-use existing docker images or set the tag for new builds.
       deploy_to_viash_hub:
         type: boolean
         required: false
@@ -48,8 +47,10 @@ on:
       target_tag:
         type: string
         required: false
-        default: main_build
-        description: Version tag of existing containers to use. Is `main_build` by default.
+        description: |
+          Version tag of containers to use. Defaults to name of the branch that triggered the workflow,
+          suffixed by "_build". Can be used in combination with 'push_containers' (by setting it to 'false')
+          to re-use existing docker images or set the tag for new builds.
       deploy_branch:
         type: string
         required: false
@@ -95,6 +96,16 @@ jobs:
       DEPLOY_BRANCH: ${{ !inputs.deploy_branch && format('{0}_build', github.ref_name) || inputs.deploy_branch }}

     steps:
+      - name: Check input arguments
+        run: |
+          input_version="${{ inputs.version || format('{0}_build', github.ref_name) }}"
+          target_tag="${{ github.event_name == 'push' && (inputs.version || format('{0}_build', github.ref_name)) || inputs.target_tag }}"
+          should_push="${{inputs.push_containers }}"
+          if [ "$input_version" != "$target_tag" ] && [ "$should_push" == "true" ]; then
+            echo "When trying to push new docker images, the tag for the components must be equal to the target_tag for the docker images."
+            exit 1
+          fi
+
       - name: Keep symlinks as-is
         run: |
           git config --global core.symlinks true
@@ -142,7 +153,7 @@ jobs:
         with:
           config_mod: |
             .functionality.version := "${{ inputs.version || format('{0}_build', github.ref_name) }}"
-            .platforms[.type == 'docker'].target_tag := '${{ github.event_name == 'push' && 'main_build' || inputs.target_tag }}'
+            .platforms[.type == 'docker'].target_tag := '${{ github.event_name == 'push' && (inputs.version || format('{0}_build', github.ref_name)) || inputs.target_tag }}'
           parallel: true
           query: ^(?!workflows)

@@ -246,7 +257,7 @@ jobs:
         with:
           config_mod: |
             .functionality.version := " ${{ !inputs.deploy_branch && format('{0}_build', github.ref_name) || inputs.deploy_branch }}"
-            .platforms[.type == 'docker'].target_tag := '${{ github.event_name == 'push' && 'main_build' || inputs.target_tag }}'
+            .platforms[.type == 'docker'].target_tag := '${{ github.event_name == 'push' && (inputs.version || format('{0}_build', github.ref_name)) || inputs.target_tag }}'
             .platforms[.type == 'docker'].target_organization := 'openpipelines-bio/openpipeline'
             .platforms[.type == 'docker'].target_registry := 'viash-hub.com:5050'
             .platforms[.type == 'docker'].target_image_source := 'https://viash-hub.com/openpipelines-bio/openpipeline'
@@ -335,7 +346,7 @@ jobs:
         if: ${{ github.event_name == 'push' || inputs.deploy_to_viash_hub }}
         run: |
           viash ns exec -s ${{ matrix.component.dir }} --apply_platform -p docker \
-            'docker tag ghcr.io/openpipelines-bio/{namespace}_{functionality-name}:${{ github.event_name == 'push' && 'main_build' || inputs.target_tag }} viash-hub.com:5050/openpipelines-bio/openpipeline/{namespace}_{functionality-name}:${{ inputs.version || format('{0}_build', github.ref_name) }}'
+            'docker tag ghcr.io/openpipelines-bio/{namespace}${{matrix.component.namespace_separator}}{functionality-name}:${{ github.event_name == 'push' && (inputs.version || format('{0}_build', github.ref_name)) || inputs.target_tag }} viash-hub.com:5050/openpipelines-bio/openpipeline/{namespace}${{matrix.component.namespace_separator}}{functionality-name}:${{ inputs.version || format('{0}_build', github.ref_name) }}'

       - name: Push container to Viash-Hub
         if: ${{ github.event_name == 'push' || inputs.deploy_to_viash_hub }}
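The new "Check input arguments" step guards against pushing images whose tag does not match the component version. A minimal Python sketch of the same rule, with a hypothetical function name and arguments mirroring the workflow inputs:

```python
# Hypothetical re-statement of the guard in the "Check input arguments" step above.
def check_input_arguments(version, target_tag, push_containers, ref_name, is_push_event):
    default_tag = f"{ref_name}_build"                  # e.g. "main_build" on the main branch
    input_version = version or default_tag             # tag written into .functionality.version
    resolved_target_tag = input_version if is_push_event else target_tag
    if push_containers and input_version != resolved_target_tag:
        raise ValueError(
            "When pushing new docker images, the component version "
            "must equal the target_tag of the docker images."
        )

# Example: a manual run that rebuilds containers must keep both tags aligned.
check_input_arguments(version="1.0.0-rc3", target_tag="1.0.0-rc3",
                      push_containers=True, ref_name="main", is_push_event=False)
```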

.github/workflows/release-build.yml

Lines changed: 3 additions & 3 deletions

@@ -158,7 +158,7 @@ jobs:
         run: |
           viash test \
             "${{ matrix.component.config }}" \
-            --config_mod ".platforms[.type == 'docker'].image := 'ghcr.io/openpipelines-bio/${{ matrix.component.namespace }}_${{ matrix.component.name }}:${{ github.event.inputs.version_tag }}'" \
+            --config_mod ".platforms[.type == 'docker'].image := 'ghcr.io/openpipelines-bio/${{ matrix.component.namespace }}${{matrix.component.namespace_separator}}${{ matrix.component.name }}:${{ github.event.inputs.version_tag }}'" \
             --config_mod ".platforms[.type == 'docker'].setup := []" \
-            --cpus 2 \
-            --memory "6gb"
+            --cpus 4 \
+            --memory "12gb"

.github/workflows/viash-test.yml

Lines changed: 2 additions & 9 deletions

@@ -54,13 +54,6 @@ jobs:
           dest_path: resources_test
           cache_key_prefix: resources_test__

-      - name: Get changed files
-        id: changed-files
-        uses: tj-actions/changed-files@v42
-        with:
-          separator: ";"
-          diff_relative: true
-
       - id: ns_list
         uses: viash-io/viash-actions/ns-list@v5
         with:
@@ -174,6 +167,6 @@ jobs:
         run: |
           viash test \
             "${{ matrix.component.config }}" \
-            --cpus 2 \
-            --memory "6gb"
+            --cpus 4 \
+            --memory "12gb"

CHANGELOG.md

Lines changed: 17 additions & 0 deletions

@@ -1,3 +1,20 @@
+# openpipelines 1.0.0-rc3
+
+## BREAKING CHANGES
+
+* Docker image names now use `/` instead of `_` between the name of the component and the namespace (PR #712).
+
+## BUG FIXES
+
+* `rna_singlesample`: fixed a bug where selecting the column for the filtering with mitochondrial fractions
+  using `obs_name_mitochondrial_fraction` was done with the wrong column name, causing `ValueError` (PR #743).
+
+* Fix publishing in `process_samples` and `process_batches` (PR #759).
+
+## NEW FUNCTIONALITY
+
+* `dimred/tsne` component: Added a tSNE dimensionality reduction component (PR #742).
+
 # openpipelines 1.0.0-rc2

 ## BUG FIXES

_viash.yaml

Lines changed: 1 addition & 1 deletion

@@ -9,7 +9,7 @@ config_mods: |
   .functionality.arguments[.multiple == true].multiple_sep := ";"
   .functionality.argument_groups[true].arguments[.multiple == true].multiple_sep := ";"
   .functionality.test_resources += {path: 'src/base/openpipelinetestutils', dest: 'openpipelinetestutils'}
-  .platforms[.type == 'docker'].namespace_separator := '_'
+  .platforms[.type == 'docker'].namespace_separator := '/'
   .platforms[.type == 'docker'].target_registry := 'ghcr.io'
   .platforms[.type == 'docker'].target_organization := 'openpipelines-bio'
   .platforms[.type == 'docker'].target_image_source := 'https://github.com/openpipelines-bio/openpipeline'
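Setting `namespace_separator` to `/` is what the BREAKING CHANGES entry in the CHANGELOG refers to: component images are now published under a nested path instead of a flat underscore-joined name. A small illustration using one of this repository's components; the tag is only for illustration:

```python
# Sketch of the image-name change implied by the new namespace_separator.
registry, organization = "ghcr.io", "openpipelines-bio"
namespace, component, tag = "convert", "from_h5ad_to_h5mu", "1.0.0-rc3"

old_image = f"{registry}/{organization}/{namespace}_{component}:{tag}"   # separator '_'
new_image = f"{registry}/{organization}/{namespace}/{component}:{tag}"   # separator '/'

print(old_image)  # ghcr.io/openpipelines-bio/convert_from_h5ad_to_h5mu:1.0.0-rc3
print(new_image)  # ghcr.io/openpipelines-bio/convert/from_h5ad_to_h5mu:1.0.0-rc3
```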

src/authors/jakub_majercik.yaml

Lines changed: 11 additions & 0 deletions

@@ -0,0 +1,11 @@
+name: Jakub Majercik
+info:
+  role: Contributor
+  links:
+
+    github: jakubmajercik
+    linkedin: jakubmajercik
+  organizations:
+    - name: Data Intuitive
+      href: https://www.data-intuitive.com
+      role: Bioinformatics Engineer

src/base/openpipelinetestutils/asserters.py

Lines changed: 50 additions & 9 deletions

@@ -2,17 +2,18 @@
 import anndata
 import pandas as pd
 import numpy as np
-from scipy.sparse import issparse
+from scipy.sparse import issparse, spmatrix
 from mudata import MuData
 from pathlib import Path
 from pandas.testing import assert_frame_equal
 from typing import Literal
 from .typing import AnnotationObjectOrPathLike
+from functools import singledispatch


 def _read_if_needed(anndata_mudata_path_or_obj):
     if isinstance(anndata_mudata_path_or_obj, (str, Path)):
-        return mudata.read(anndata_mudata_path_or_obj)
+        return mudata.read(str(anndata_mudata_path_or_obj)) # TODO: remove when mudata fixes PAth bug
     if isinstance(anndata_mudata_path_or_obj, (mudata.MuData, anndata.AnnData)):
         return anndata_mudata_path_or_obj.copy()
     raise AssertionError("Expected 'Path', 'str' to MuData/AnnData "
@@ -64,6 +65,12 @@ def assert_var_names_equal(left: AnnotationObjectOrPathLike, right: AnnotationOb
         assert_var_names_equal(modality, right[mod_name])


+def _assert_frame_equal(left, right, sort=False, *args, **kwargs):
+    if sort:
+        left, right = left.sort_index(inplace=False), right.sort_index(inplace=False)
+        left, right = left.sort_index(axis=1, inplace=False), right.sort_index(axis=1, inplace=False)
+    assert_frame_equal(left, right, *args, **kwargs)
+
 def assert_annotation_frame_equal(annotation_attr: Literal["obs", "var"],
                                   left: AnnotationObjectOrPathLike, right: AnnotationObjectOrPathLike,
                                   sort=False, *args, **kwargs):
@@ -72,9 +79,7 @@ def assert_annotation_frame_equal(annotation_attr: Literal["obs", "var"],
     left, right = _read_if_needed(left), _read_if_needed(right)
     _assert_same_annotation_object_class(left, right)
     left_frame, right_frame = getattr(left, annotation_attr), getattr(right, annotation_attr)
-    if sort:
-        left_frame, right_frame = left_frame.sort_index(inplace=False), right_frame.sort_index(inplace=False)
-    assert_frame_equal(left_frame, right_frame, *args, **kwargs)
+    _assert_frame_equal(left_frame, right_frame, sort=sort, *args, **kwargs)
     if isinstance(left, MuData):
         assert_mudata_modality_keys_equal(left, right)
         for mod_name, modality in left.mod.items():
@@ -123,13 +128,49 @@ def assert_layers_equal(left: AnnotationObjectOrPathLike,
         assert_layers_equal(modality, right[mod_name])


+
+def assert_multidimensional_annotation_equal(annotation_attr: Literal["obsm", "varm"],
+                                             left, right, sort=False):
+    if not annotation_attr in ("obsm", "varm"):
+        raise ValueError("annotation_attr should be 'obsm', or 'varm'")
+    left, right = _read_if_needed(left), _read_if_needed(right)
+    _assert_same_annotation_object_class(left, right)
+
+    @singledispatch
+    def _assert_multidimensional_value_equal(left, right, **kwargs):
+        raise NotImplementedError("Unregistered type found while asserting")
+
+    @_assert_multidimensional_value_equal.register
+    def _(left: pd.DataFrame, right, **kwargs):
+        _assert_frame_equal(left, right, **kwargs)
+
+    @_assert_multidimensional_value_equal.register(np.ndarray)
+    @_assert_multidimensional_value_equal.register(spmatrix)
+    def _(left, right, **kwargs):
+        # Cannot sort sparse and dense matrices so ignore sort param
+        _assert_layer_equal(left, right)
+
+    left_dict, right_dict = getattr(left, annotation_attr), getattr(right, annotation_attr)
+    left_keys, right_keys = left_dict.keys(), right_dict.keys()
+    assert left_keys == right_keys, f"Keys of {annotation_attr} differ:\n[left]:{left_keys}\n[right]:{right_keys}"
+    for left_key, left_value in left_dict.items():
+        _assert_multidimensional_value_equal(left_value, right_dict[left_key], sort=sort)
+    if isinstance(left, MuData):
+        assert_mudata_modality_keys_equal(left, right)
+        for mod_name, modality in left.mod.items():
+            assert_multidimensional_annotation_equal(annotation_attr ,modality, right[mod_name], sort=sort)
+
+
 def assert_annotation_objects_equal(left: AnnotationObjectOrPathLike,
                                     right: AnnotationObjectOrPathLike,
-                                    check_data=True):
+                                    check_data=True,
+                                    sort=True):
     left, right = _read_if_needed(left), _read_if_needed(right)
     _assert_same_annotation_object_class(left, right)
     assert_shape_equal(left, right)
-    assert_annotation_frame_equal("obs", left, right)
-    assert_annotation_frame_equal("var", left, right)
+    assert_annotation_frame_equal("obs", left, right, sort=sort)
+    assert_annotation_frame_equal("var", left, right, sort=sort)
+    assert_multidimensional_annotation_equal("varm", left, right, sort=sort)
+    assert_multidimensional_annotation_equal("obsm", left, right, sort=sort)
     if check_data:
-        assert_layers_equal(left, right)
+        assert_layers_equal(left, right)
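With these additions, `assert_annotation_objects_equal` also compares `obsm`/`varm` and sorts the `obs`/`var` frames before comparison (`sort=True` by default). A minimal usage sketch, assuming `openpipelinetestutils` is importable (it is shipped as a test resource via `_viash.yaml`); the data below is invented:

```python
import numpy as np
import pandas as pd
import anndata as ad
import mudata as md
from openpipelinetestutils.asserters import assert_annotation_objects_equal

adata = ad.AnnData(
    X=np.array([[1.0, 2.0], [3.0, 4.0]]),
    obs=pd.DataFrame({"celltype": ["A", "B"]}, index=["obs1", "obs2"]),
    var=pd.DataFrame({"feat": ["a", "b"]}, index=["var1", "var2"]),
    obsm={"X_pca": np.array([[0.1, 0.2], [0.3, 0.4]])},
)
mdata = md.MuData({"rna": adata})

# obs/var frames, obsm/varm entries and the data layers are all checked;
# paths to .h5mu/.h5ad files are accepted as well and read on the fly.
assert_annotation_objects_equal(mdata, mdata.copy())
```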

src/base/openpipelinetestutils/fixtures.py

Lines changed: 41 additions & 1 deletion

@@ -1,5 +1,8 @@
 from uuid import uuid4
 import pytest
+import pandas as pd
+import anndata as ad
+import mudata as md

 @pytest.fixture
 def random_path(tmp_path):
@@ -20,4 +23,41 @@ def wrapper(mudata_obj):
         output_path = random_h5mu_path()
         mudata_obj.write(output_path)
         return output_path
-    return wrapper
+    return wrapper
+
+@pytest.fixture
+def small_anndata_1():
+    df = pd.DataFrame([[1, 2, 3], [4, 5, 6]], index=["obs1", "obs2"], columns=["var1", "var2", "var3"])
+    obs = pd.DataFrame([["A"], ["B"]], index=df.index, columns=["Obs"])
+    var = pd.DataFrame([["a"], ["b"], ["c"]], index=df.columns, columns=["Feat"])
+    ad1 = ad.AnnData(df, obs=obs, var=var)
+    return ad1
+
+@pytest.fixture
+def small_anndata_2():
+    df = pd.DataFrame([[1, 2, 3], [4, 5, 6]], index=["obs1", "obs2"], columns=["var4", "var5", "var6"])
+    obs2 = pd.DataFrame(["C", "D"], index=df.index, columns=["Obs"])
+    var2 = pd.DataFrame(["d", "e", "g"], index=df.columns, columns=["Feat"])
+    ad2 = ad.AnnData(df, obs=obs2, var=var2)
+    return ad2
+
+@pytest.fixture
+def small_mudata(small_anndata_1, small_anndata_2):
+    return md.MuData({'mod1': small_anndata_1, 'mod2': small_anndata_2})
+
+@pytest.fixture
+def small_mudata_path(small_mudata, write_mudata_to_file):
+    return write_mudata_to_file(small_mudata)
+
+@pytest.fixture
+def split_small_mudata_path(small_mudata_mod1_path, small_mudata_mod2_path):
+    return small_mudata_mod1_path, small_mudata_mod2_path
+
+@pytest.fixture
+def small_mudata_mod1_path(small_mudata, write_mudata_to_file):
+    return write_mudata_to_file(md.MuData({'mod1': small_mudata.mod['mod1']}))
+
+@pytest.fixture
+def small_mudata_mod2_path(small_mudata, write_mudata_to_file):
+    return write_mudata_to_file(md.MuData({'mod2': small_mudata.mod['mod2']}))
+
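A sketch of how a component test could consume the new fixtures, assuming they are exposed to pytest (for instance through a shared conftest); the test itself is hypothetical:

```python
import mudata as md

def test_mod1_split_matches_original(small_mudata, small_mudata_mod1_path):
    # small_mudata_mod1_path is an .h5mu file holding only the 'mod1' modality
    mod1_only = md.read_h5mu(small_mudata_mod1_path)
    assert list(mod1_only.mod.keys()) == ["mod1"]
    assert mod1_only.mod["mod1"].shape == small_mudata.mod["mod1"].shape  # (2, 3)
```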

src/convert/from_h5ad_to_h5mu/config.vsh.yaml

Lines changed: 1 addition & 3 deletions

@@ -48,9 +48,7 @@ platforms:
           - procps
       - type: python
         __merge__: /src/base/requirements/anndata_mudata.yaml
-    test_setup:
-      - type: python
-        __merge__: [ /src/base/requirements/viashpy.yaml, .]
+    __merge__: [ /src/base/requirements/python_test_setup.yaml, .]
   - type: nextflow
     directives:
       label: [lowmem, singlecpu]

Lines changed: 15 additions & 11 deletions

@@ -1,31 +1,33 @@
 import sys
 import pytest
 import mudata as mu
+from openpipelinetestutils.asserters import assert_annotation_objects_equal

 ## VIASH START
 meta = {
-    'resources_dir': 'resources_test'
+    'resources_dir': 'resources_test',
+    'executable': './target/docker/convert/from_h5ad_to_h5mu/from_h5ad_to_h5mu',
+    'config': './src/convert/from_h5ad_to_h5mu/config.vsh.yaml'
 }
 ## VIASH END

 input = meta["resources_dir"] + "/pbmc_1k_protein_v3/pbmc_1k_protein_v3_mms.h5mu"

-def test_run(run_component, tmp_path):
+def test_run(run_component, random_h5mu_path, random_path):
     mdata = mu.read_h5mu(input)
-
-    tmp_rna = tmp_path / "rna.h5ad"
-    tmp_prot = tmp_path / "prot.h5ad"
+    tmp_rna = random_path(extension="h5ad")
+    tmp_prot = random_path(extension="h5ad")
     mdata.mod["rna"].write_h5ad(tmp_rna)
     mdata.mod["prot"].write_h5ad(tmp_prot)

-    tmp_output = tmp_path / "output.h5mu"
+    tmp_output = random_h5mu_path()

     cmd_pars = [
         "--modality", "rna",
-        "--input", str(tmp_rna),
+        "--input", tmp_rna,
         "--modality", "prot",
-        "--input", str(tmp_prot),
-        "--output", str(tmp_output),
+        "--input", tmp_prot,
+        "--output", tmp_output,
         "--output_compression", "gzip"
     ]
     run_component(cmd_pars)
@@ -34,8 +36,10 @@ def test_run(run_component, tmp_path):

     mdata2 = mu.read_h5mu(tmp_output)

-    assert "rna" in mdata2.mod, "Resulting mudata should contain rna modality"
-    assert "prot" in mdata2.mod, "Resulting mudata should contain rna modality"
+    assert list(mdata2.mod.keys()) == ["rna", "prot"]
+
+    assert_annotation_objects_equal(mdata2.mod["rna"], tmp_rna)
+    assert_annotation_objects_equal(mdata2.mod["prot"], tmp_prot)

 if __name__ == "__main__":
     sys.exit(pytest.main([__file__]))
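The rewritten assertions are stricter than the old membership checks: they pin the exact set and order of modalities and compare the converted modalities against the original `.h5ad` inputs. A small illustration of why equality on the key list matters, with hypothetical keys:

```python
mod_keys = ["rna", "prot", "vdj"]  # hypothetical output with an unexpected extra modality
print("rna" in mod_keys and "prot" in mod_keys)  # True  -> the old membership checks pass
print(list(mod_keys) == ["rna", "prot"])         # False -> the new equality check flags it
```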
