From 1e02b74d976dfd1cf92eca8be790bb5e7ae2163f Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Mon, 10 Feb 2025 16:45:21 +0000 Subject: [PATCH 01/56] [pre-commit.ci] pre-commit autoupdate MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit updates: - [github.com/astral-sh/ruff-pre-commit: v0.9.4 → v0.9.6](https://github.com/astral-sh/ruff-pre-commit/compare/v0.9.4...v0.9.6) --- .pre-commit-config.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 371d05f9..49f60d2a 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -23,7 +23,7 @@ repos: args: [--branch, main, --branch, dev] - id: check-added-large-files - repo: https://github.com/astral-sh/ruff-pre-commit - rev: v0.9.4 + rev: v0.9.6 hooks: - id: ruff args: [ --fix, --exit-non-zero-on-fix, "--ignore=C901" ] From 37707e3d02e9ec10cf8809df5f460d7607112c8f Mon Sep 17 00:00:00 2001 From: Matt Field Date: Tue, 11 Feb 2025 13:07:05 +0000 Subject: [PATCH 02/56] Added offline user alert set status test --- tests/unit/test_user_alert.py | 19 +++++++++++++++++-- 1 file changed, 17 insertions(+), 2 deletions(-) diff --git a/tests/unit/test_user_alert.py b/tests/unit/test_user_alert.py index 71d08336..a819df15 100644 --- a/tests/unit/test_user_alert.py +++ b/tests/unit/test_user_alert.py @@ -191,13 +191,28 @@ def test_user_alert_status_offline() -> None: _run.alerts = [_alert.id] _run.commit() - sender(_alert._local_staging_file.parents[1], 1, 10, ["folders", "runs", "alerts"]) + _id_mapping = sender(_alert._local_staging_file.parents[1], 1, 10, ["folders", "runs", "alerts"]) time.sleep(1) + + # Get online aler, check status is not set + _online_alert = UserAlert(_id_mapping.get(_alert.id)) + assert not _online_alert.get_status(run_id=_id_mapping.get(_run.id)) _alert.set_status(_run.id, "critical") _alert.commit() - import pdb; 
pdb.set_trace() time.sleep(1) + + # Check online status is still not set as change has not been sent + _online_alert.refresh() + assert not _online_alert.get_status(run_id=_id_mapping.get(_run.id)) + + sender(_alert._local_staging_file.parents[1], 1, 10, ["alerts"]) + time.sleep(1) + + # Check online status has been updated + _online_alert.refresh() + assert _online_alert.get_status(run_id=_id_mapping.get(_run.id)) == "critical" + _run.delete() _folder.delete(recursive=True, runs_only=False, delete_runs=True) _alert.delete() From 5cc27c0bb4e901e1a91a327a22420b6051899608 Mon Sep 17 00:00:00 2001 From: Matt Field Date: Tue, 11 Feb 2025 13:35:56 +0000 Subject: [PATCH 03/56] Added offline events test --- tests/unit/test_events.py | 37 +++++++++++++++++++++++++++++++++++++ 1 file changed, 37 insertions(+) diff --git a/tests/unit/test_events.py b/tests/unit/test_events.py index 205643e6..fb5d0587 100644 --- a/tests/unit/test_events.py +++ b/tests/unit/test_events.py @@ -7,6 +7,7 @@ from simvue.api.objects import Events, Folder, Run from simvue.models import DATETIME_FORMAT +from simvue.sender import sender @pytest.mark.api @pytest.mark.online @@ -30,3 +31,39 @@ def test_events_creation_online() -> None: _run.delete() _folder.delete(recursive=True, delete_runs=True, runs_only=False) +@pytest.mark.api +@pytest.mark.offline +def test_events_creation_offline() -> None: + _uuid: str = f"{uuid.uuid4()}".split("-")[0] + _folder_name = f"/simvue_unit_testing/{_uuid}" + _folder = Folder.new(path=_folder_name, offline=True) + _run = Run.new(folder=_folder_name, offline=True) + _folder.commit() + _run.commit() + _timestamp = datetime.datetime.now().strftime(DATETIME_FORMAT) + _events = Events.new( + run=_run.id, + events=[ + {"message": "This is a test!", "timestamp": _timestamp} + ], + offline=True + ) + _events.commit() + with _events._local_staging_file.open() as in_f: + _local_data = json.load(in_f) + + assert _local_data.get("run") == _run.id + assert 
_local_data.get("events")[0].get("message") == "This is a test!" + assert _local_data.get("events")[0].get("timestamp") == _timestamp + + _id_mapping = sender(_events._local_staging_file.parents[1], 1, 10, ["folders", "runs", "events"]) + time.sleep(1) + + # Get online version of events + _online_events = Events(_id_mapping.get(_events.id)) + _event_content = next(_online_events.get(run_id=_id_mapping.get(_run.id))) + assert _event_content.message == "This is a test!" + assert _event_content.timestamp == _timestamp + + _run.delete() + _folder.delete(recursive=True, delete_runs=True, runs_only=False) \ No newline at end of file From 860232e3ef66b147d1ae6c4710d8d0a235528985 Mon Sep 17 00:00:00 2001 From: Matt Field Date: Tue, 11 Feb 2025 14:53:34 +0000 Subject: [PATCH 04/56] Added offline metrics test --- simvue/api/objects/metrics.py | 28 ++++++++---------- tests/unit/test_metrics.py | 55 ++++++++++++++++++++++++++++++++++- 2 files changed, 66 insertions(+), 17 deletions(-) diff --git a/simvue/api/objects/metrics.py b/simvue/api/objects/metrics.py index de5d3075..43b75561 100644 --- a/simvue/api/objects/metrics.py +++ b/simvue/api/objects/metrics.py @@ -37,7 +37,7 @@ def __init__( def new( cls, *, run: str, offline: bool = False, metrics: list[MetricSet], **kwargs ): - """Create a new Events entry on the Simvue server""" + """Create a new Metrics entry on the Simvue server""" return Metrics( run=run, metrics=[metric.model_dump() for metric in metrics], @@ -51,27 +51,23 @@ def get( cls, metrics: list[str], xaxis: typing.Literal["timestamp", "step", "time"], + runs: list[str], *, count: pydantic.PositiveInt | None = None, offset: pydantic.PositiveInt | None = None, **kwargs, ) -> typing.Generator[MetricSet, None, None]: _class_instance = cls(_read_only=True, _local=True) - if ( - _data := cls._get_all_objects( - count, - offset, - metrics=json.dumps(metrics), - xaxis=xaxis, - **kwargs, - ).get("data") - ) is None: - raise RuntimeError( - f"Expected key 'data' for 
retrieval of {_class_instance.__class__.__name__.lower()}s" - ) - - for _entry in _data: - yield MetricSet(**_entry) + _data = cls._get_all_objects( + count, + offset, + metrics=json.dumps(metrics), + runs=json.dumps(runs), + xaxis=xaxis, + **kwargs, + ) + # TODO: Temp fix, just return the dictionary. Not sure what format we really want this in... + return _data @pydantic.validate_call def span(self, run_ids: list[str]) -> dict[str, int | float]: diff --git a/tests/unit/test_metrics.py b/tests/unit/test_metrics.py index d8432fc0..5d8b7575 100644 --- a/tests/unit/test_metrics.py +++ b/tests/unit/test_metrics.py @@ -6,6 +6,8 @@ import uuid from simvue.api.objects import Metrics, Folder, Run +from simvue.models import DATETIME_FORMAT +from simvue.sender import sender @pytest.mark.api @pytest.mark.online @@ -38,9 +40,60 @@ def test_metrics_creation_online() -> None: ) assert _metrics.to_dict() _metrics.commit() - assert _metrics.get(metrics=["x", "y", "z"], xaxis="step") + assert _metrics.get(metrics=["x", "y", "z"], xaxis="step", runs=[_run.id]) assert _metrics.span(run_ids=[_run.id]) assert _metrics.names(run_ids=[_run.id]) _run.delete() _folder.delete(recursive=True, delete_runs=True, runs_only=False) +@pytest.mark.api +@pytest.mark.offline +def test_metrics_creation_offline() -> None: + _uuid: str = f"{uuid.uuid4()}".split("-")[0] + _folder_name = f"/simvue_unit_testing/{_uuid}" + _folder = Folder.new(path=_folder_name, offline=True) + _run = Run.new(name="hello", folder=_folder_name, offline=True) + _folder.commit() + _run.commit() + + _values = { + "x": 1, + "y": 2.0, + "z": True + } + _time: int = 1 + _step: int = 1 + _timestamp = datetime.datetime.now().strftime(DATETIME_FORMAT) + _metrics = Metrics.new( + run=_run.id, + metrics=[ + { + "timestamp": _timestamp, + "time": _time, + "step": _step, + "values": _values, + } + ], + offline=True + ) + _metrics.commit() + with _metrics._local_staging_file.open() as in_f: + _local_data = json.load(in_f) + + assert 
_local_data.get("run") == _run.id + assert _local_data.get("metrics")[0].get("values") == _values + assert _local_data.get("metrics")[0].get("timestamp") == _timestamp + assert _local_data.get("metrics")[0].get("step") == _step + assert _local_data.get("metrics")[0].get("time") == _time + + _id_mapping = sender(_metrics._local_staging_file.parents[1], 1, 10, ["folders", "runs", "metrics"]) + time.sleep(1) + + # Get online version of metrics + _online_metrics = Metrics(_id_mapping.get(_metrics.id)) + _data = _online_metrics.get(metrics=["x", "y", "z"], runs=[_id_mapping.get(_run.id)], xaxis="step") + assert sorted(_online_metrics.names(run_ids=[_id_mapping.get(_run.id)])) == sorted(_values.keys()) + assert _data.get(_id_mapping.get(_run.id)).get('y')[0].get('value') == 2.0 + assert _data.get(_id_mapping.get(_run.id)).get('y')[0].get('step') == 1 + _run.delete() + _folder.delete(recursive=True, delete_runs=True, runs_only=False) \ No newline at end of file From b717d99036c4c5c588e37c14c77a7480e084747d Mon Sep 17 00:00:00 2001 From: Matt Field Date: Tue, 11 Feb 2025 16:09:47 +0000 Subject: [PATCH 05/56] Added small sleep to fix test --- tests/functional/test_dispatch.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/tests/functional/test_dispatch.py b/tests/functional/test_dispatch.py index de77ccb2..e2ab6362 100644 --- a/tests/functional/test_dispatch.py +++ b/tests/functional/test_dispatch.py @@ -67,9 +67,10 @@ def callback(___: list[typing.Any], _: str, args=check_dict, var=variable) -> No event.set() dispatcher.join() + time.sleep(0.1) for variable in variables: - assert check_dict[variable]["counter"] >= 2 if overload_buffer else 1, f"Check of counter for dispatcher '{variable}' failed with count = {check_dict[variable]['counter']}" + assert check_dict[variable]["counter"] >= (2 if overload_buffer else 1), f"Check of counter for dispatcher '{variable}' failed with count = {check_dict[variable]['counter']}" assert time.time() - start_time < 
time_threshold From 2b03ed18894cd41870d5b410cbf0d6ee2da0e035 Mon Sep 17 00:00:00 2001 From: Matt Field Date: Wed, 12 Feb 2025 11:11:45 +0000 Subject: [PATCH 06/56] Fixed missing start and end times --- simvue/run.py | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/simvue/run.py b/simvue/run.py index af7a5aa0..f67117b5 100644 --- a/simvue/run.py +++ b/simvue/run.py @@ -465,12 +465,13 @@ def _start(self, reconnect: bool = False) -> bool: logger.debug("Starting run") + self._start_time = time.time() + if self._sv_obj: self._sv_obj.status = self._status + self._sv_obj.started = self._start_time self._sv_obj.commit() - self._start_time = time.time() - if self._pid == 0: self._pid = os.getpid() @@ -1459,7 +1460,7 @@ def set_status( ) -> bool: """Set run status - status to assign to this run + status to assign to this run once finished Parameters ---------- @@ -1479,6 +1480,7 @@ def set_status( if self._sv_obj: self._sv_obj.status = status + self._sv_obj.endtime = time.time() self._sv_obj.commit() return True From 15314973c8175c772594ab229cf5a896a9b2cd1c Mon Sep 17 00:00:00 2001 From: Matt Field Date: Wed, 12 Feb 2025 11:45:53 +0000 Subject: [PATCH 07/56] Fixed offline file artifact path --- simvue/api/objects/artifact/file.py | 15 ++++++++------- 1 file changed, 8 insertions(+), 7 deletions(-) diff --git a/simvue/api/objects/artifact/file.py b/simvue/api/objects/artifact/file.py index 7878d514..54f0a8cb 100644 --- a/simvue/api/objects/artifact/file.py +++ b/simvue/api/objects/artifact/file.py @@ -52,14 +52,15 @@ def new( if _mime_type not in get_mimetypes(): raise ValueError(f"Invalid MIME type '{mime_type}' specified") - file_path = pathlib.Path(file_path) - _file_size = file_path.stat().st_size - _file_orig_path = file_path.expanduser().absolute() - _file_checksum = calculate_sha256(f"{file_path}", is_file=True) - kwargs.pop("original_path", None) - kwargs.pop("size", None) - kwargs.pop("checksum", None) + if _file_orig_path := 
kwargs.pop("original_path", None): + _file_size = kwargs.pop("size") + _file_checksum = kwargs.pop("checksum") + else: + file_path = pathlib.Path(file_path) + _file_size = file_path.stat().st_size + _file_orig_path = file_path.expanduser().absolute() + _file_checksum = calculate_sha256(f"{file_path}", is_file=True) _artifact = FileArtifact( name=name, From c1ab236fa94126647c49070feddb386f9cbbeaa3 Mon Sep 17 00:00:00 2001 From: Matt Field Date: Wed, 12 Feb 2025 13:16:31 +0000 Subject: [PATCH 08/56] Loosen numpy requirement --- poetry.lock | 108 ++++++++++++++++++++++--------------------------- pyproject.toml | 2 +- 2 files changed, 49 insertions(+), 61 deletions(-) diff --git a/poetry.lock b/poetry.lock index aa295ce7..ed8390cd 100644 --- a/poetry.lock +++ b/poetry.lock @@ -505,7 +505,6 @@ files = [ {file = "cryptography-44.0.0-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:761817a3377ef15ac23cd7834715081791d4ec77f9297ee694ca1ee9c2c7e5eb"}, {file = "cryptography-44.0.0-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:3c672a53c0fb4725a29c303be906d3c1fa99c32f58abe008a82705f9ee96f40b"}, {file = "cryptography-44.0.0-cp37-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:4ac4c9f37eba52cb6fbeaf5b59c152ea976726b865bd4cf87883a7e7006cc543"}, - {file = "cryptography-44.0.0-cp37-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:60eb32934076fa07e4316b7b2742fa52cbb190b42c2df2863dbc4230a0a9b385"}, {file = "cryptography-44.0.0-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:ed3534eb1090483c96178fcb0f8893719d96d5274dfde98aa6add34614e97c8e"}, {file = "cryptography-44.0.0-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:f3f6fdfa89ee2d9d496e2c087cebef9d4fcbb0ad63c40e821b39f74bf48d9c5e"}, {file = "cryptography-44.0.0-cp37-abi3-win32.whl", hash = "sha256:eb33480f1bad5b78233b0ad3e1b0be21e8ef1da745d8d2aecbb20671658b9053"}, @@ -516,7 +515,6 @@ files = [ {file = "cryptography-44.0.0-cp39-abi3-manylinux_2_28_aarch64.whl", hash = 
"sha256:c5eb858beed7835e5ad1faba59e865109f3e52b3783b9ac21e7e47dc5554e289"}, {file = "cryptography-44.0.0-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:f53c2c87e0fb4b0c00fa9571082a057e37690a8f12233306161c8f4b819960b7"}, {file = "cryptography-44.0.0-cp39-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:9e6fc8a08e116fb7c7dd1f040074c9d7b51d74a8ea40d4df2fc7aa08b76b9e6c"}, - {file = "cryptography-44.0.0-cp39-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:9abcc2e083cbe8dde89124a47e5e53ec38751f0d7dfd36801008f316a127d7ba"}, {file = "cryptography-44.0.0-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:d2436114e46b36d00f8b72ff57e598978b37399d2786fd39793c36c6d5cb1c64"}, {file = "cryptography-44.0.0-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:a01956ddfa0a6790d594f5b34fc1bfa6098aca434696a03cfdbe469b8ed79285"}, {file = "cryptography-44.0.0-cp39-abi3-win32.whl", hash = "sha256:eca27345e1214d1b9f9490d200f9db5a874479be914199194e746c893788d417"}, @@ -1329,68 +1327,58 @@ pyspark = ["pyspark (>=3.5.0)"] [[package]] name = "numpy" -version = "2.2.2" +version = "2.0.2" description = "Fundamental package for array computing in Python" optional = false -python-versions = ">=3.10" +python-versions = ">=3.9" groups = ["main"] markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ - {file = "numpy-2.2.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:7079129b64cb78bdc8d611d1fd7e8002c0a2565da6a47c4df8062349fee90e3e"}, - {file = "numpy-2.2.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2ec6c689c61df613b783aeb21f945c4cbe6c51c28cb70aae8430577ab39f163e"}, - {file = "numpy-2.2.2-cp310-cp310-macosx_14_0_arm64.whl", hash = "sha256:40c7ff5da22cd391944a28c6a9c638a5eef77fcf71d6e3a79e1d9d9e82752715"}, - {file = "numpy-2.2.2-cp310-cp310-macosx_14_0_x86_64.whl", hash = "sha256:995f9e8181723852ca458e22de5d9b7d3ba4da3f11cc1cb113f093b271d7965a"}, - {file = "numpy-2.2.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:b78ea78450fd96a498f50ee096f69c75379af5138f7881a51355ab0e11286c97"}, - {file = "numpy-2.2.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3fbe72d347fbc59f94124125e73fc4976a06927ebc503ec5afbfb35f193cd957"}, - {file = "numpy-2.2.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:8e6da5cffbbe571f93588f562ed130ea63ee206d12851b60819512dd3e1ba50d"}, - {file = "numpy-2.2.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:09d6a2032faf25e8d0cadde7fd6145118ac55d2740132c1d845f98721b5ebcfd"}, - {file = "numpy-2.2.2-cp310-cp310-win32.whl", hash = "sha256:159ff6ee4c4a36a23fe01b7c3d07bd8c14cc433d9720f977fcd52c13c0098160"}, - {file = "numpy-2.2.2-cp310-cp310-win_amd64.whl", hash = "sha256:64bd6e1762cd7f0986a740fee4dff927b9ec2c5e4d9a28d056eb17d332158014"}, - {file = "numpy-2.2.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:642199e98af1bd2b6aeb8ecf726972d238c9877b0f6e8221ee5ab945ec8a2189"}, - {file = "numpy-2.2.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:6d9fc9d812c81e6168b6d405bf00b8d6739a7f72ef22a9214c4241e0dc70b323"}, - {file = "numpy-2.2.2-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:c7d1fd447e33ee20c1f33f2c8e6634211124a9aabde3c617687d8b739aa69eac"}, - {file = "numpy-2.2.2-cp311-cp311-macosx_14_0_x86_64.whl", hash = "sha256:451e854cfae0febe723077bd0cf0a4302a5d84ff25f0bfece8f29206c7bed02e"}, - {file = "numpy-2.2.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bd249bc894af67cbd8bad2c22e7cbcd46cf87ddfca1f1289d1e7e54868cc785c"}, - {file = "numpy-2.2.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:02935e2c3c0c6cbe9c7955a8efa8908dd4221d7755644c59d1bba28b94fd334f"}, - {file = "numpy-2.2.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:a972cec723e0563aa0823ee2ab1df0cb196ed0778f173b381c871a03719d4826"}, - {file = "numpy-2.2.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:d6d6a0910c3b4368d89dde073e630882cdb266755565155bc33520283b2d9df8"}, - 
{file = "numpy-2.2.2-cp311-cp311-win32.whl", hash = "sha256:860fd59990c37c3ef913c3ae390b3929d005243acca1a86facb0773e2d8d9e50"}, - {file = "numpy-2.2.2-cp311-cp311-win_amd64.whl", hash = "sha256:da1eeb460ecce8d5b8608826595c777728cdf28ce7b5a5a8c8ac8d949beadcf2"}, - {file = "numpy-2.2.2-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:ac9bea18d6d58a995fac1b2cb4488e17eceeac413af014b1dd26170b766d8467"}, - {file = "numpy-2.2.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:23ae9f0c2d889b7b2d88a3791f6c09e2ef827c2446f1c4a3e3e76328ee4afd9a"}, - {file = "numpy-2.2.2-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:3074634ea4d6df66be04f6728ee1d173cfded75d002c75fac79503a880bf3825"}, - {file = "numpy-2.2.2-cp312-cp312-macosx_14_0_x86_64.whl", hash = "sha256:8ec0636d3f7d68520afc6ac2dc4b8341ddb725039de042faf0e311599f54eb37"}, - {file = "numpy-2.2.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2ffbb1acd69fdf8e89dd60ef6182ca90a743620957afb7066385a7bbe88dc748"}, - {file = "numpy-2.2.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0349b025e15ea9d05c3d63f9657707a4e1d471128a3b1d876c095f328f8ff7f0"}, - {file = "numpy-2.2.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:463247edcee4a5537841d5350bc87fe8e92d7dd0e8c71c995d2c6eecb8208278"}, - {file = "numpy-2.2.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:9dd47ff0cb2a656ad69c38da850df3454da88ee9a6fde0ba79acceee0e79daba"}, - {file = "numpy-2.2.2-cp312-cp312-win32.whl", hash = "sha256:4525b88c11906d5ab1b0ec1f290996c0020dd318af8b49acaa46f198b1ffc283"}, - {file = "numpy-2.2.2-cp312-cp312-win_amd64.whl", hash = "sha256:5acea83b801e98541619af398cc0109ff48016955cc0818f478ee9ef1c5c3dcb"}, - {file = "numpy-2.2.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:b208cfd4f5fe34e1535c08983a1a6803fdbc7a1e86cf13dd0c61de0b51a0aadc"}, - {file = "numpy-2.2.2-cp313-cp313-macosx_11_0_arm64.whl", hash = 
"sha256:d0bbe7dd86dca64854f4b6ce2ea5c60b51e36dfd597300057cf473d3615f2369"}, - {file = "numpy-2.2.2-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:22ea3bb552ade325530e72a0c557cdf2dea8914d3a5e1fecf58fa5dbcc6f43cd"}, - {file = "numpy-2.2.2-cp313-cp313-macosx_14_0_x86_64.whl", hash = "sha256:128c41c085cab8a85dc29e66ed88c05613dccf6bc28b3866cd16050a2f5448be"}, - {file = "numpy-2.2.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:250c16b277e3b809ac20d1f590716597481061b514223c7badb7a0f9993c7f84"}, - {file = "numpy-2.2.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e0c8854b09bc4de7b041148d8550d3bd712b5c21ff6a8ed308085f190235d7ff"}, - {file = "numpy-2.2.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:b6fb9c32a91ec32a689ec6410def76443e3c750e7cfc3fb2206b985ffb2b85f0"}, - {file = "numpy-2.2.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:57b4012e04cc12b78590a334907e01b3a85efb2107df2b8733ff1ed05fce71de"}, - {file = "numpy-2.2.2-cp313-cp313-win32.whl", hash = "sha256:4dbd80e453bd34bd003b16bd802fac70ad76bd463f81f0c518d1245b1c55e3d9"}, - {file = "numpy-2.2.2-cp313-cp313-win_amd64.whl", hash = "sha256:5a8c863ceacae696aff37d1fd636121f1a512117652e5dfb86031c8d84836369"}, - {file = "numpy-2.2.2-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:b3482cb7b3325faa5f6bc179649406058253d91ceda359c104dac0ad320e1391"}, - {file = "numpy-2.2.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:9491100aba630910489c1d0158034e1c9a6546f0b1340f716d522dc103788e39"}, - {file = "numpy-2.2.2-cp313-cp313t-macosx_14_0_arm64.whl", hash = "sha256:41184c416143defa34cc8eb9d070b0a5ba4f13a0fa96a709e20584638254b317"}, - {file = "numpy-2.2.2-cp313-cp313t-macosx_14_0_x86_64.whl", hash = "sha256:7dca87ca328f5ea7dafc907c5ec100d187911f94825f8700caac0b3f4c384b49"}, - {file = "numpy-2.2.2-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0bc61b307655d1a7f9f4b043628b9f2b721e80839914ede634e3d485913e1fb2"}, - 
{file = "numpy-2.2.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9fad446ad0bc886855ddf5909cbf8cb5d0faa637aaa6277fb4b19ade134ab3c7"}, - {file = "numpy-2.2.2-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:149d1113ac15005652e8d0d3f6fd599360e1a708a4f98e43c9c77834a28238cb"}, - {file = "numpy-2.2.2-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:106397dbbb1896f99e044efc90360d098b3335060375c26aa89c0d8a97c5f648"}, - {file = "numpy-2.2.2-cp313-cp313t-win32.whl", hash = "sha256:0eec19f8af947a61e968d5429f0bd92fec46d92b0008d0a6685b40d6adf8a4f4"}, - {file = "numpy-2.2.2-cp313-cp313t-win_amd64.whl", hash = "sha256:97b974d3ba0fb4612b77ed35d7627490e8e3dff56ab41454d9e8b23448940576"}, - {file = "numpy-2.2.2-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:b0531f0b0e07643eb089df4c509d30d72c9ef40defa53e41363eca8a8cc61495"}, - {file = "numpy-2.2.2-pp310-pypy310_pp73-macosx_14_0_x86_64.whl", hash = "sha256:e9e82dcb3f2ebbc8cb5ce1102d5f1c5ed236bf8a11730fb45ba82e2841ec21df"}, - {file = "numpy-2.2.2-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e0d4142eb40ca6f94539e4db929410f2a46052a0fe7a2c1c59f6179c39938d2a"}, - {file = "numpy-2.2.2-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:356ca982c188acbfa6af0d694284d8cf20e95b1c3d0aefa8929376fea9146f60"}, - {file = "numpy-2.2.2.tar.gz", hash = "sha256:ed6906f61834d687738d25988ae117683705636936cc605be0bb208b23df4d8f"}, + {file = "numpy-2.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:51129a29dbe56f9ca83438b706e2e69a39892b5eda6cedcb6b0c9fdc9b0d3ece"}, + {file = "numpy-2.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:f15975dfec0cf2239224d80e32c3170b1d168335eaedee69da84fbe9f1f9cd04"}, + {file = "numpy-2.0.2-cp310-cp310-macosx_14_0_arm64.whl", hash = "sha256:8c5713284ce4e282544c68d1c3b2c7161d38c256d2eefc93c1d683cf47683e66"}, + {file = "numpy-2.0.2-cp310-cp310-macosx_14_0_x86_64.whl", hash = 
"sha256:becfae3ddd30736fe1889a37f1f580e245ba79a5855bff5f2a29cb3ccc22dd7b"}, + {file = "numpy-2.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2da5960c3cf0df7eafefd806d4e612c5e19358de82cb3c343631188991566ccd"}, + {file = "numpy-2.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:496f71341824ed9f3d2fd36cf3ac57ae2e0165c143b55c3a035ee219413f3318"}, + {file = "numpy-2.0.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a61ec659f68ae254e4d237816e33171497e978140353c0c2038d46e63282d0c8"}, + {file = "numpy-2.0.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:d731a1c6116ba289c1e9ee714b08a8ff882944d4ad631fd411106a30f083c326"}, + {file = "numpy-2.0.2-cp310-cp310-win32.whl", hash = "sha256:984d96121c9f9616cd33fbd0618b7f08e0cfc9600a7ee1d6fd9b239186d19d97"}, + {file = "numpy-2.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:c7b0be4ef08607dd04da4092faee0b86607f111d5ae68036f16cc787e250a131"}, + {file = "numpy-2.0.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:49ca4decb342d66018b01932139c0961a8f9ddc7589611158cb3c27cbcf76448"}, + {file = "numpy-2.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:11a76c372d1d37437857280aa142086476136a8c0f373b2e648ab2c8f18fb195"}, + {file = "numpy-2.0.2-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:807ec44583fd708a21d4a11d94aedf2f4f3c3719035c76a2bbe1fe8e217bdc57"}, + {file = "numpy-2.0.2-cp311-cp311-macosx_14_0_x86_64.whl", hash = "sha256:8cafab480740e22f8d833acefed5cc87ce276f4ece12fdaa2e8903db2f82897a"}, + {file = "numpy-2.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a15f476a45e6e5a3a79d8a14e62161d27ad897381fecfa4a09ed5322f2085669"}, + {file = "numpy-2.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:13e689d772146140a252c3a28501da66dfecd77490b498b168b501835041f951"}, + {file = "numpy-2.0.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = 
"sha256:9ea91dfb7c3d1c56a0e55657c0afb38cf1eeae4544c208dc465c3c9f3a7c09f9"}, + {file = "numpy-2.0.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:c1c9307701fec8f3f7a1e6711f9089c06e6284b3afbbcd259f7791282d660a15"}, + {file = "numpy-2.0.2-cp311-cp311-win32.whl", hash = "sha256:a392a68bd329eafac5817e5aefeb39038c48b671afd242710b451e76090e81f4"}, + {file = "numpy-2.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:286cd40ce2b7d652a6f22efdfc6d1edf879440e53e76a75955bc0c826c7e64dc"}, + {file = "numpy-2.0.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:df55d490dea7934f330006d0f81e8551ba6010a5bf035a249ef61a94f21c500b"}, + {file = "numpy-2.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8df823f570d9adf0978347d1f926b2a867d5608f434a7cff7f7908c6570dcf5e"}, + {file = "numpy-2.0.2-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:9a92ae5c14811e390f3767053ff54eaee3bf84576d99a2456391401323f4ec2c"}, + {file = "numpy-2.0.2-cp312-cp312-macosx_14_0_x86_64.whl", hash = "sha256:a842d573724391493a97a62ebbb8e731f8a5dcc5d285dfc99141ca15a3302d0c"}, + {file = "numpy-2.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c05e238064fc0610c840d1cf6a13bf63d7e391717d247f1bf0318172e759e692"}, + {file = "numpy-2.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0123ffdaa88fa4ab64835dcbde75dcdf89c453c922f18dced6e27c90d1d0ec5a"}, + {file = "numpy-2.0.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:96a55f64139912d61de9137f11bf39a55ec8faec288c75a54f93dfd39f7eb40c"}, + {file = "numpy-2.0.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:ec9852fb39354b5a45a80bdab5ac02dd02b15f44b3804e9f00c556bf24b4bded"}, + {file = "numpy-2.0.2-cp312-cp312-win32.whl", hash = "sha256:671bec6496f83202ed2d3c8fdc486a8fc86942f2e69ff0e986140339a63bcbe5"}, + {file = "numpy-2.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:cfd41e13fdc257aa5778496b8caa5e856dc4896d4ccf01841daee1d96465467a"}, + {file = 
"numpy-2.0.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9059e10581ce4093f735ed23f3b9d283b9d517ff46009ddd485f1747eb22653c"}, + {file = "numpy-2.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:423e89b23490805d2a5a96fe40ec507407b8ee786d66f7328be214f9679df6dd"}, + {file = "numpy-2.0.2-cp39-cp39-macosx_14_0_arm64.whl", hash = "sha256:2b2955fa6f11907cf7a70dab0d0755159bca87755e831e47932367fc8f2f2d0b"}, + {file = "numpy-2.0.2-cp39-cp39-macosx_14_0_x86_64.whl", hash = "sha256:97032a27bd9d8988b9a97a8c4d2c9f2c15a81f61e2f21404d7e8ef00cb5be729"}, + {file = "numpy-2.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1e795a8be3ddbac43274f18588329c72939870a16cae810c2b73461c40718ab1"}, + {file = "numpy-2.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f26b258c385842546006213344c50655ff1555a9338e2e5e02a0756dc3e803dd"}, + {file = "numpy-2.0.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5fec9451a7789926bcf7c2b8d187292c9f93ea30284802a0ab3f5be8ab36865d"}, + {file = "numpy-2.0.2-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:9189427407d88ff25ecf8f12469d4d39d35bee1db5d39fc5c168c6f088a6956d"}, + {file = "numpy-2.0.2-cp39-cp39-win32.whl", hash = "sha256:905d16e0c60200656500c95b6b8dca5d109e23cb24abc701d41c02d74c6b3afa"}, + {file = "numpy-2.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:a3f4ab0caa7f053f6797fcd4e1e25caee367db3112ef2b6ef82d749530768c73"}, + {file = "numpy-2.0.2-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:7f0a0c6f12e07fa94133c8a67404322845220c06a9e80e85999afe727f7438b8"}, + {file = "numpy-2.0.2-pp39-pypy39_pp73-macosx_14_0_x86_64.whl", hash = "sha256:312950fdd060354350ed123c0e25a71327d3711584beaef30cdaa93320c392d4"}, + {file = "numpy-2.0.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:26df23238872200f63518dd2aa984cfca675d82469535dc7162dc2ee52d9dd5c"}, + {file = "numpy-2.0.2-pp39-pypy39_pp73-win_amd64.whl", hash = 
"sha256:a46288ec55ebbd58947d31d72be2c63cbf839f0a63b49cb755022310792a3385"}, + {file = "numpy-2.0.2.tar.gz", hash = "sha256:883c987dee1880e2a864ab0dc9892292582510604156762362d9326444636e78"}, ] [[package]] @@ -2636,4 +2624,4 @@ plot = ["matplotlib", "plotly"] [metadata] lock-version = "2.1" python-versions = ">=3.10,<3.14" -content-hash = "b87307deab6d125136242de2adc36049337970d6abea23392c9fdf57761230a6" +content-hash = "d9a431e414844f95182814d46eed5988be22352ac728a0693f8e1fcffb0aa351" diff --git a/pyproject.toml b/pyproject.toml index bbbfd633..c4264a0f 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -46,7 +46,7 @@ dependencies = [ "humanfriendly (>=10.0,<11.0)", "randomname (>=0.2.1,<0.3.0)", "codecarbon (>=2.8.3,<3.0.0)", - "numpy (>=2.2.2,<3.0.0)", + "numpy (>=2.0.0,<3.0.0)", "flatdict (>=4.0.1,<5.0.0)", "semver (>=3.0.4,<4.0.0)", "email-validator (>=2.2.0,<3.0.0)", From 9f8ba81497b6786c8e12aefaacd02eb460397e60 Mon Sep 17 00:00:00 2001 From: Matt Field Date: Wed, 12 Feb 2025 14:22:58 +0000 Subject: [PATCH 09/56] Make timestamp validation in utilities consistent --- simvue/utilities.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/simvue/utilities.py b/simvue/utilities.py index 69a8ecd8..df9e80b1 100644 --- a/simvue/utilities.py +++ b/simvue/utilities.py @@ -357,7 +357,7 @@ def validate_timestamp(timestamp): Validate a user-provided timestamp """ try: - datetime.datetime.strptime(timestamp, "%Y-%m-%d %H:%M:%S.%f") + datetime.datetime.strptime(timestamp, DATETIME_FORMAT) except ValueError: return False From 8e317593f72ad61170079755f8a5c9dee792934e Mon Sep 17 00:00:00 2001 From: Matt Field Date: Wed, 12 Feb 2025 15:03:20 +0000 Subject: [PATCH 10/56] Added missing runtime attribute to Run --- simvue/api/objects/run.py | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/simvue/api/objects/run.py b/simvue/api/objects/run.py index 9841ded5..f218bbff 100644 --- a/simvue/api/objects/run.py +++ b/simvue/api/objects/run.py @@ -251,6 +251,13 @@ 
def created(self) -> datetime.datetime | None: datetime.datetime.strptime(_created, DATETIME_FORMAT) if _created else None ) + @property + @staging_check + def runtime(self) -> datetime.datetime | None: + """Retrieve created datetime for the run""" + _runtime: str | None = self._get_attribute("runtime") + return datetime.datetime.strptime(_runtime, "%H:%M:%S.%f") if _runtime else None + @property @staging_check def started(self) -> datetime.datetime | None: From d17678724f9c47d35c1b053d9ad4e61d1df63977 Mon Sep 17 00:00:00 2001 From: Matt Field Date: Wed, 12 Feb 2025 15:05:48 +0000 Subject: [PATCH 11/56] Use time not datetime --- simvue/api/objects/run.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/simvue/api/objects/run.py b/simvue/api/objects/run.py index f218bbff..08262dd0 100644 --- a/simvue/api/objects/run.py +++ b/simvue/api/objects/run.py @@ -11,6 +11,7 @@ import typing import pydantic import datetime +import time try: from typing import Self @@ -256,7 +257,7 @@ def created(self) -> datetime.datetime | None: def runtime(self) -> datetime.datetime | None: """Retrieve created datetime for the run""" _runtime: str | None = self._get_attribute("runtime") - return datetime.datetime.strptime(_runtime, "%H:%M:%S.%f") if _runtime else None + return time.strptime(_runtime, "%H:%M:%S.%f") if _runtime else None @property @staging_check From 21d0f0c0ba2d89df9c0351e462785e3124606ba5 Mon Sep 17 00:00:00 2001 From: Matt Field Date: Wed, 12 Feb 2025 16:54:28 +0000 Subject: [PATCH 12/56] Improved get_artifacts_as_files test --- tests/functional/test_client.py | 21 +++++++++++++++------ 1 file changed, 15 insertions(+), 6 deletions(-) diff --git a/tests/functional/test_client.py b/tests/functional/test_client.py index 3b6c6f24..37c1a1ae 100644 --- a/tests/functional/test_client.py +++ b/tests/functional/test_client.py @@ -145,12 +145,21 @@ def test_get_artifacts_as_files( create_test_run[1]["run_id"], category=category, output_dir=tempd ) files = 
[os.path.basename(i) for i in glob.glob(os.path.join(tempd, "*"))] - if not category or category == "input": - assert create_test_run[1]["file_1"] in files - if not category or category == "output": - assert create_test_run[1]["file_2"] in files - if not category or category == "code": - assert create_test_run[1]["file_3"] in files + + if not category: + expected_files = ["file_1", "file_2", "file_3"] + elif category == "input": + expected_files = ["file_1"] + elif category == "output": + expected_files = ["file_2"] + elif category == "code": + expected_files = ["file_3"] + + for file in ["file_1", "file_2", "file_3"]: + if file in expected_files: + assert create_test_run[1][file] in files + else: + assert create_test_run[1][file] not in files @pytest.mark.dependency From cd80cda4fac74232df8d763356790eb5db958c5e Mon Sep 17 00:00:00 2001 From: Matt Field Date: Thu, 13 Feb 2025 09:43:14 +0000 Subject: [PATCH 13/56] Added new from_run method to Artifact and fixed client --- simvue/api/objects/artifact/fetch.py | 51 +++++++++++++++++++--------- simvue/client.py | 6 ++-- 2 files changed, 38 insertions(+), 19 deletions(-) diff --git a/simvue/api/objects/artifact/fetch.py b/simvue/api/objects/artifact/fetch.py index 88f582ce..d7b163ca 100644 --- a/simvue/api/objects/artifact/fetch.py +++ b/simvue/api/objects/artifact/fetch.py @@ -23,6 +23,37 @@ def __new__(cls, identifier: str | None = None, **kwargs): else: return ObjectArtifact(identifier=identifier, **kwargs) + @classmethod + def from_run( + cls, + run_id: str, + category: typing.Literal["input", "output", "code"] | None = None, + **kwargs, + ) -> typing.Generator[tuple[str, FileArtifact | ObjectArtifact], None, None]: + _temp = ArtifactBase(**kwargs) + _url = URL(_temp._user_config.server.url) / f"runs/{run_id}/artifacts" + _response = sv_get( + url=f"{_url}", params={"category": category}, headers=_temp._headers + ) + _json_response = get_json_from_response( + expected_type=list, + response=_response, + 
expected_status=[http.HTTPStatus.OK, http.HTTPStatus.NOT_FOUND], + scenario=f"Retrieval of artifacts for run '{run_id}'", + ) + + if _response.status_code == http.HTTPStatus.NOT_FOUND or not _json_response: + raise ObjectNotFoundError( + _temp._label, category, extra=f"for run '{run_id}'" + ) + + for _entry in _json_response: + _id = _entry.pop("id") + yield ( + _id, + Artifact(_local=True, _read_only=True, identifier=_id, **_entry), + ) + @classmethod def from_name( cls, run_id: str, name: str, **kwargs @@ -99,21 +130,9 @@ def get( if (_data := _json_response.get("data")) is None: raise RuntimeError(f"Expected key 'data' for retrieval of {_label}s") - _out_dict: dict[str, FileArtifact | ObjectArtifact] = {} - for _entry in _data: _id = _entry.pop("id") - if _entry["original_path"]: - yield ( - _id, - FileArtifact( - _local=True, _read_only=True, identifier=_id, **_entry - ), - ) - else: - yield ( - _id, - ObjectArtifact( - _local=True, _read_only=True, identifier=_id, **_entry - ), - ) + yield ( + _id, + Artifact(_local=True, _read_only=True, identifier=_id, **_entry), + ) diff --git a/simvue/client.py b/simvue/client.py index 45f152c5..c8e7979e 100644 --- a/simvue/client.py +++ b/simvue/client.py @@ -590,9 +590,9 @@ def get_artifacts_as_files( RuntimeError if there was a failure retrieving artifacts from the server """ - _artifacts: typing.Generator[tuple[str, Artifact], None, None] = Artifact.get( - runs=json.dumps([run_id]), category=category - ) # type: ignore + _artifacts: typing.Generator[tuple[str, Artifact], None, None] = ( + Artifact.from_run(run_id=run_id, category=category) + ) with ThreadPoolExecutor(CONCURRENT_DOWNLOADS) as executor: futures = [ From 6f7c16c6d3764121e679c36873faab620a76d4e0 Mon Sep 17 00:00:00 2001 From: Matt Field Date: Thu, 13 Feb 2025 10:32:17 +0000 Subject: [PATCH 14/56] Add docstrings to new artifact methods --- simvue/api/objects/artifact/fetch.py | 25 +++++++++++++++++++++++++ simvue/client.py | 11 ++--------- 2 files changed, 
27 insertions(+), 9 deletions(-) diff --git a/simvue/api/objects/artifact/fetch.py b/simvue/api/objects/artifact/fetch.py index d7b163ca..1d571266 100644 --- a/simvue/api/objects/artifact/fetch.py +++ b/simvue/api/objects/artifact/fetch.py @@ -30,6 +30,31 @@ def from_run( category: typing.Literal["input", "output", "code"] | None = None, **kwargs, ) -> typing.Generator[tuple[str, FileArtifact | ObjectArtifact], None, None]: + """Return artifacts associated with a given run. + + Parameters + ---------- + run_id : str + The ID of the run to retriece artifacts from + category : typing.Literal["input", "output", "code"] | None, optional + The category of artifacts to return, by default all artifacts are returned + + Returns + ------- + typing.Generator[tuple[str, FileArtifact | ObjectArtifact], None, None] + The artifacts + + Yields + ------ + Iterator[typing.Generator[tuple[str, FileArtifact | ObjectArtifact], None, None]] + identifier for artifact + the artifact itself as a class instance + + Raises + ------ + ObjectNotFoundError + Raised if artifacts could not be found for that run + """ _temp = ArtifactBase(**kwargs) _url = URL(_temp._user_config.server.url) / f"runs/{run_id}/artifacts" _response = sv_get( diff --git a/simvue/client.py b/simvue/client.py index c8e7979e..df01159a 100644 --- a/simvue/client.py +++ b/simvue/client.py @@ -565,9 +565,6 @@ def get_artifacts_as_files( run_id: str, category: typing.Literal["input", "output", "code"] | None = None, output_dir: pydantic.DirectoryPath | None = None, - startswith: str | None = None, - contains: str | None = None, - endswith: str | None = None, ) -> None: """Retrieve artifacts from the given run as a set of files @@ -575,15 +572,11 @@ def get_artifacts_as_files( ---------- run_id : str the unique identifier for the run + category : typing.Literal["input", "output", "code"] | + the type of files to retrieve output_dir : str | None, optional location to download files to, the default of None will download them to 
the current working directory - startswith : str, optional - only download artifacts with this prefix in their name, by default None - contains : str, optional - only download artifacts containing this term in their name, by default None - endswith : str, optional - only download artifacts ending in this term in their name, by default None Raises ------ From 5a69cb1ec4173712c8faca844a2a54e73a7ddd10 Mon Sep 17 00:00:00 2001 From: Matt Field Date: Thu, 13 Feb 2025 11:10:44 +0000 Subject: [PATCH 15/56] Fix hierarchical artifact download --- simvue/client.py | 10 +++------- 1 file changed, 3 insertions(+), 7 deletions(-) diff --git a/simvue/client.py b/simvue/client.py index df01159a..c69e6b3a 100644 --- a/simvue/client.py +++ b/simvue/client.py @@ -9,7 +9,6 @@ import contextlib import json import logging -import os import pathlib import typing import http @@ -45,12 +44,9 @@ def _download_artifact_to_file( artifact: Artifact, output_dir: pathlib.Path | None ) -> None: - try: - _file_name = os.path.basename(artifact.name) - except AttributeError: - _file_name = os.path.basename(artifact) - _output_file = (output_dir or pathlib.Path.cwd()).joinpath(_file_name) - + _output_file = (output_dir or pathlib.Path.cwd()).joinpath(artifact.name) + # If this is a hierarchical structure being downloaded, need to create directories + _output_file.parent.mkdir(parents=True, exist_ok=True) with _output_file.open("wb") as out_f: for content in artifact.download_content(): out_f.write(content) From 53152526cbb321dae2b5634ca2c6225b9ec42d13 Mon Sep 17 00:00:00 2001 From: Matt Field Date: Thu, 13 Feb 2025 15:03:10 +0000 Subject: [PATCH 16/56] Add params option to post and put --- simvue/api/objects/alert/base.py | 1 + simvue/api/objects/base.py | 7 +++++-- simvue/api/request.py | 10 +++++++++- simvue/run.py | 16 ++-------------- 4 files changed, 17 insertions(+), 17 deletions(-) diff --git a/simvue/api/objects/alert/base.py b/simvue/api/objects/alert/base.py index 0204f6d3..58bc7ccb 100644 
--- a/simvue/api/objects/alert/base.py +++ b/simvue/api/objects/alert/base.py @@ -29,6 +29,7 @@ def new(cls, **kwargs): def __init__(self, identifier: str | None = None, **kwargs) -> None: """Retrieve an alert from the Simvue server by identifier""" self._label = "alert" + self._staging = {"deduplicate": True} super().__init__(identifier=identifier, **kwargs) def compare(self, other: "AlertBase") -> bool: diff --git a/simvue/api/objects/base.py b/simvue/api/objects/base.py index 7f43904f..840e8b66 100644 --- a/simvue/api/objects/base.py +++ b/simvue/api/objects/base.py @@ -169,6 +169,8 @@ def __init__( "User-Agent": _user_agent or f"Simvue Python client {__version__}", } + self._params: dict[str, str] = {} + self._staging: dict[str, typing.Any] = {} # If this object is read-only, but not a local construction, make an API call @@ -412,6 +414,7 @@ def _post(self, is_json: bool = True, **kwargs) -> dict[str, typing.Any]: _response = sv_post( url=f"{self._base_url}", headers=self._headers | {"Content-Type": "application/msgpack"}, + params=self._params, data=kwargs, is_json=is_json, ) @@ -423,7 +426,7 @@ def _post(self, is_json: bool = True, **kwargs) -> dict[str, typing.Any]: _json_response = get_json_from_response( response=_response, - expected_status=[http.HTTPStatus.OK], + expected_status=[http.HTTPStatus.OK, http.HTTPStatus.CONFLICT], scenario=f"Creation of {self._label}", ) @@ -452,7 +455,7 @@ def _put(self, **kwargs) -> dict[str, typing.Any]: return get_json_from_response( response=_response, - expected_status=[http.HTTPStatus.OK], + expected_status=[http.HTTPStatus.OK, http.HTTPStatus.CONFLICT], scenario=f"Creation of {self._label} '{self._identifier}", ) diff --git a/simvue/api/request.py b/simvue/api/request.py index 3ebdb86b..8dd6a8bd 100644 --- a/simvue/api/request.py +++ b/simvue/api/request.py @@ -64,6 +64,7 @@ def is_retryable_exception(exception: Exception) -> bool: def post( url: str, headers: dict[str, str], + params: dict[str, str], data: 
typing.Any, is_json: bool = True, files: dict[str, typing.Any] | None = None, @@ -76,6 +77,8 @@ def post( URL to post to headers : dict[str, str] headers for the post request + params : dict[str, str] + query parameters for the post request data : dict[str, typing.Any] data to post is_json : bool, optional @@ -95,7 +98,12 @@ def post( logging.debug(f"POST: {url}\n\tdata={data_sent}") response = requests.post( - url, headers=headers, data=data_sent, timeout=DEFAULT_API_TIMEOUT, files=files + url, + headers=headers, + params=params, + data=data_sent, + timeout=DEFAULT_API_TIMEOUT, + files=files, ) if response.status_code == http.HTTPStatus.UNPROCESSABLE_ENTITY: diff --git a/simvue/run.py b/simvue/run.py index af7a5aa0..f4397a0b 100644 --- a/simvue/run.py +++ b/simvue/run.py @@ -1651,20 +1651,8 @@ def add_alerts( return False def _attach_alert_to_run(self, alert: AlertBase) -> str | None: - # Check if the alert already exists - _alert_id: str | None = None - - for _, _existing_alert in Alert.get( - offline=self._user_config.run.mode == "offline" - ): - if _existing_alert.compare(alert): - _alert_id = _existing_alert.id - logger.info("Existing alert found with id: %s", _existing_alert.id) - break - - if not _alert_id: - alert.commit() - _alert_id = alert.id + alert.commit() + _alert_id: str = alert.id self._sv_obj.alerts = [_alert_id] From 0fb38ea57cd1c622a6cb40a33ccf9549e465d1c4 Mon Sep 17 00:00:00 2001 From: Matt Field Date: Thu, 13 Feb 2025 15:07:47 +0000 Subject: [PATCH 17/56] Add optional attach to run --- simvue/run.py | 40 +++++++++++++++++++++++++++++----------- 1 file changed, 29 insertions(+), 11 deletions(-) diff --git a/simvue/run.py b/simvue/run.py index f4397a0b..c7c8c755 100644 --- a/simvue/run.py +++ b/simvue/run.py @@ -1651,15 +1651,9 @@ def add_alerts( return False def _attach_alert_to_run(self, alert: AlertBase) -> str | None: - alert.commit() - _alert_id: str = alert.id - - self._sv_obj.alerts = [_alert_id] - + self._sv_obj.alerts = [alert.id] 
self._sv_obj.commit() - return _alert_id - @skip_if_failed("_aborted", "_suppress_errors", None) @check_run_initialised @pydantic.validate_call @@ -1679,6 +1673,7 @@ def create_metric_range_alert( ] = "average", notification: typing.Literal["email", "none"] = "none", trigger_abort: bool = False, + attach_to_run: bool = True, ) -> str | None: """Creates a metric range alert with the specified name (if it doesn't exist) and applies it to the current run. If alert already exists it will @@ -1708,6 +1703,8 @@ def create_metric_range_alert( whether to notify on trigger, by default "none" trigger_abort : bool, optional whether this alert can trigger a run abort, default False + attach_to_run : bool, optional + whether to attach this alert to the current run, default True Returns ------- @@ -1729,7 +1726,10 @@ def create_metric_range_alert( offline=self._user_config.run.mode == "offline", ) _alert.abort = trigger_abort - return self._attach_alert_to_run(_alert) + _alert.commit() + if attach_to_run: + self._attach_alert_to_run(_alert) + return _alert.id @skip_if_failed("_aborted", "_suppress_errors", None) @check_run_initialised @@ -1749,6 +1749,7 @@ def create_metric_threshold_alert( ] = "average", notification: typing.Literal["email", "none"] = "none", trigger_abort: bool = False, + attach_to_run: bool = True, ) -> str | None: """Creates a metric threshold alert with the specified name (if it doesn't exist) and applies it to the current run. 
If alert already exists it will @@ -1776,6 +1777,8 @@ def create_metric_threshold_alert( whether to notify on trigger, by default "none" trigger_abort : bool, optional whether this alert can trigger a run abort, default False + attach_to_run : bool, optional + whether to attach this alert to the current run, default True Returns ------- @@ -1796,7 +1799,10 @@ def create_metric_threshold_alert( offline=self._user_config.run.mode == "offline", ) _alert.abort = trigger_abort - return self._attach_alert_to_run(_alert) + _alert.commit() + if attach_to_run: + self._attach_alert_to_run(_alert) + return _alert.id @skip_if_failed("_aborted", "_suppress_errors", None) @check_run_initialised @@ -1810,6 +1816,7 @@ def create_event_alert( frequency: pydantic.PositiveInt = 1, notification: typing.Literal["email", "none"] = "none", trigger_abort: bool = False, + attach_to_run: bool = True, ) -> str | None: """Creates an events alert with the specified name (if it doesn't exist) and applies it to the current run. If alert already exists it will @@ -1827,6 +1834,8 @@ def create_event_alert( whether to notify on trigger, by default "none" trigger_abort : bool, optional whether this alert can trigger a run abort + attach_to_run : bool, optional + whether to attach this alert to the current run, default True Returns ------- @@ -1843,7 +1852,10 @@ def create_event_alert( offline=self._user_config.run.mode == "offline", ) _alert.abort = trigger_abort - return self._attach_alert_to_run(_alert) + _alert.commit() + if attach_to_run: + self._attach_alert_to_run(_alert) + return _alert.id @skip_if_failed("_aborted", "_suppress_errors", None) @check_run_initialised @@ -1855,6 +1867,7 @@ def create_user_alert( description: str | None = None, notification: typing.Literal["email", "none"] = "none", trigger_abort: bool = False, + attach_to_run: bool = True, ) -> None: """Creates a user alert with the specified name (if it doesn't exist) and applies it to the current run. 
If alert already exists it will @@ -1870,6 +1883,8 @@ def create_user_alert( whether to notify on trigger, by default "none" trigger_abort : bool, optional whether this alert can trigger a run abort, default False + attach_to_run : bool, optional + whether to attach this alert to the current run, default True Returns ------- @@ -1884,7 +1899,10 @@ def create_user_alert( offline=self._user_config.run.mode == "offline", ) _alert.abort = trigger_abort - return self._attach_alert_to_run(_alert) + _alert.commit() + if attach_to_run: + self._attach_alert_to_run(_alert) + return _alert.id @skip_if_failed("_aborted", "_suppress_errors", False) @check_run_initialised From 8ba756fdf5d48839f9c94446168a5987fe141c99 Mon Sep 17 00:00:00 2001 From: Matt Field Date: Thu, 13 Feb 2025 15:42:20 +0000 Subject: [PATCH 18/56] Added params correctly in new of each alert --- simvue/api/objects/alert/base.py | 1 - simvue/api/objects/alert/events.py | 1 + simvue/api/objects/alert/metrics.py | 3 ++ simvue/api/objects/alert/user.py | 4 +- simvue/api/objects/artifact/fetch.py | 76 ++++++++++++++++++++++------ simvue/run.py | 1 + 6 files changed, 68 insertions(+), 18 deletions(-) diff --git a/simvue/api/objects/alert/base.py b/simvue/api/objects/alert/base.py index 58bc7ccb..0204f6d3 100644 --- a/simvue/api/objects/alert/base.py +++ b/simvue/api/objects/alert/base.py @@ -29,7 +29,6 @@ def new(cls, **kwargs): def __init__(self, identifier: str | None = None, **kwargs) -> None: """Retrieve an alert from the Simvue server by identifier""" self._label = "alert" - self._staging = {"deduplicate": True} super().__init__(identifier=identifier, **kwargs) def compare(self, other: "AlertBase") -> bool: diff --git a/simvue/api/objects/alert/events.py b/simvue/api/objects/alert/events.py index 0d38b63f..00558ffb 100644 --- a/simvue/api/objects/alert/events.py +++ b/simvue/api/objects/alert/events.py @@ -87,6 +87,7 @@ def new( _offline=offline, ) _alert._staging |= _alert_definition + _alert._params = 
{"deduplicate": True} return _alert diff --git a/simvue/api/objects/alert/metrics.py b/simvue/api/objects/alert/metrics.py index 2fb74f06..e9340873 100644 --- a/simvue/api/objects/alert/metrics.py +++ b/simvue/api/objects/alert/metrics.py @@ -105,6 +105,8 @@ def new( _offline=offline, ) _alert._staging |= _alert_definition + _alert._params = {"deduplicate": True} + return _alert @@ -194,6 +196,7 @@ def new( _offline=offline, ) _alert._staging |= _alert_definition + _alert._params = {"deduplicate": True} return _alert diff --git a/simvue/api/objects/alert/user.py b/simvue/api/objects/alert/user.py index 9ddcd6e1..7fdcee43 100644 --- a/simvue/api/objects/alert/user.py +++ b/simvue/api/objects/alert/user.py @@ -57,7 +57,7 @@ def new( whether this alert should be created locally, default is False """ - return UserAlert( + _alert = UserAlert( name=name, description=description, notification=notification, @@ -66,6 +66,8 @@ def new( _read_only=False, _offline=offline, ) + _alert._params = {"deduplicate": True} + return _alert @classmethod def get( diff --git a/simvue/api/objects/artifact/fetch.py b/simvue/api/objects/artifact/fetch.py index 88f582ce..1d571266 100644 --- a/simvue/api/objects/artifact/fetch.py +++ b/simvue/api/objects/artifact/fetch.py @@ -23,6 +23,62 @@ def __new__(cls, identifier: str | None = None, **kwargs): else: return ObjectArtifact(identifier=identifier, **kwargs) + @classmethod + def from_run( + cls, + run_id: str, + category: typing.Literal["input", "output", "code"] | None = None, + **kwargs, + ) -> typing.Generator[tuple[str, FileArtifact | ObjectArtifact], None, None]: + """Return artifacts associated with a given run. 
+ + Parameters + ---------- + run_id : str + The ID of the run to retriece artifacts from + category : typing.Literal["input", "output", "code"] | None, optional + The category of artifacts to return, by default all artifacts are returned + + Returns + ------- + typing.Generator[tuple[str, FileArtifact | ObjectArtifact], None, None] + The artifacts + + Yields + ------ + Iterator[typing.Generator[tuple[str, FileArtifact | ObjectArtifact], None, None]] + identifier for artifact + the artifact itself as a class instance + + Raises + ------ + ObjectNotFoundError + Raised if artifacts could not be found for that run + """ + _temp = ArtifactBase(**kwargs) + _url = URL(_temp._user_config.server.url) / f"runs/{run_id}/artifacts" + _response = sv_get( + url=f"{_url}", params={"category": category}, headers=_temp._headers + ) + _json_response = get_json_from_response( + expected_type=list, + response=_response, + expected_status=[http.HTTPStatus.OK, http.HTTPStatus.NOT_FOUND], + scenario=f"Retrieval of artifacts for run '{run_id}'", + ) + + if _response.status_code == http.HTTPStatus.NOT_FOUND or not _json_response: + raise ObjectNotFoundError( + _temp._label, category, extra=f"for run '{run_id}'" + ) + + for _entry in _json_response: + _id = _entry.pop("id") + yield ( + _id, + Artifact(_local=True, _read_only=True, identifier=_id, **_entry), + ) + @classmethod def from_name( cls, run_id: str, name: str, **kwargs @@ -99,21 +155,9 @@ def get( if (_data := _json_response.get("data")) is None: raise RuntimeError(f"Expected key 'data' for retrieval of {_label}s") - _out_dict: dict[str, FileArtifact | ObjectArtifact] = {} - for _entry in _data: _id = _entry.pop("id") - if _entry["original_path"]: - yield ( - _id, - FileArtifact( - _local=True, _read_only=True, identifier=_id, **_entry - ), - ) - else: - yield ( - _id, - ObjectArtifact( - _local=True, _read_only=True, identifier=_id, **_entry - ), - ) + yield ( + _id, + Artifact(_local=True, _read_only=True, identifier=_id, 
**_entry), + ) diff --git a/simvue/run.py b/simvue/run.py index c7c8c755..9a3d2a9b 100644 --- a/simvue/run.py +++ b/simvue/run.py @@ -1798,6 +1798,7 @@ def create_metric_threshold_alert( notification=notification, offline=self._user_config.run.mode == "offline", ) + _alert.abort = trigger_abort _alert.commit() if attach_to_run: From e2669b47bc34aec11a0c842344c67ac622f9907e Mon Sep 17 00:00:00 2001 From: Matt Field Date: Thu, 13 Feb 2025 15:53:17 +0000 Subject: [PATCH 19/56] Fixed post in artifacts --- simvue/api/objects/artifact/base.py | 1 + 1 file changed, 1 insertion(+) diff --git a/simvue/api/objects/artifact/base.py b/simvue/api/objects/artifact/base.py index dac009f5..0b5af04d 100644 --- a/simvue/api/objects/artifact/base.py +++ b/simvue/api/objects/artifact/base.py @@ -97,6 +97,7 @@ def _upload(self, file: io.BytesIO) -> None: _response = sv_post( url=_url, headers={}, + params={}, is_json=False, files={"file": file}, data=self._init_data.get("fields"), From 2d0af8d6556166ac44f6c39ea1a2f807d6725398 Mon Sep 17 00:00:00 2001 From: Matt Field Date: Thu, 13 Feb 2025 16:24:25 +0000 Subject: [PATCH 20/56] Drop requirement for initialized run for creating alerts and move to attach to run --- simvue/run.py | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/simvue/run.py b/simvue/run.py index e46295d3..38c40398 100644 --- a/simvue/run.py +++ b/simvue/run.py @@ -1652,12 +1652,12 @@ def add_alerts( return False + @check_run_initialised def _attach_alert_to_run(self, alert: AlertBase) -> str | None: self._sv_obj.alerts = [alert.id] self._sv_obj.commit() @skip_if_failed("_aborted", "_suppress_errors", None) - @check_run_initialised @pydantic.validate_call def create_metric_range_alert( self, @@ -1734,7 +1734,6 @@ def create_metric_range_alert( return _alert.id @skip_if_failed("_aborted", "_suppress_errors", None) - @check_run_initialised @pydantic.validate_call def create_metric_threshold_alert( self, @@ -1808,7 +1807,6 @@ def 
create_metric_threshold_alert( return _alert.id @skip_if_failed("_aborted", "_suppress_errors", None) - @check_run_initialised @pydantic.validate_call def create_event_alert( self, @@ -1861,7 +1859,6 @@ def create_event_alert( return _alert.id @skip_if_failed("_aborted", "_suppress_errors", None) - @check_run_initialised @pydantic.validate_call def create_user_alert( self, From 71c157e6c59bb1d65ec48fb8aa63f28e0eeed039 Mon Sep 17 00:00:00 2001 From: Matt Field Date: Thu, 13 Feb 2025 16:24:25 +0000 Subject: [PATCH 21/56] Drop requirement for initialized run for creating alerts and move to attach to run --- simvue/run.py | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/simvue/run.py b/simvue/run.py index 9a3d2a9b..63369751 100644 --- a/simvue/run.py +++ b/simvue/run.py @@ -1650,12 +1650,12 @@ def add_alerts( return False + @check_run_initialised def _attach_alert_to_run(self, alert: AlertBase) -> str | None: self._sv_obj.alerts = [alert.id] self._sv_obj.commit() @skip_if_failed("_aborted", "_suppress_errors", None) - @check_run_initialised @pydantic.validate_call def create_metric_range_alert( self, @@ -1732,7 +1732,6 @@ def create_metric_range_alert( return _alert.id @skip_if_failed("_aborted", "_suppress_errors", None) - @check_run_initialised @pydantic.validate_call def create_metric_threshold_alert( self, @@ -1806,7 +1805,6 @@ def create_metric_threshold_alert( return _alert.id @skip_if_failed("_aborted", "_suppress_errors", None) - @check_run_initialised @pydantic.validate_call def create_event_alert( self, @@ -1859,7 +1857,6 @@ def create_event_alert( return _alert.id @skip_if_failed("_aborted", "_suppress_errors", None) - @check_run_initialised @pydantic.validate_call def create_user_alert( self, From 95f5845872109952e5ee65bbb9c772b0b7f0f12d Mon Sep 17 00:00:00 2001 From: Matt Field Date: Thu, 13 Feb 2025 17:02:52 +0000 Subject: [PATCH 22/56] Change .alerts to return a list and fix add_alerts --- simvue/api/objects/run.py | 5 ++--- 
simvue/run.py | 2 +- 2 files changed, 3 insertions(+), 4 deletions(-) diff --git a/simvue/api/objects/run.py b/simvue/api/objects/run.py index 9841ded5..cbc470a3 100644 --- a/simvue/api/objects/run.py +++ b/simvue/api/objects/run.py @@ -225,9 +225,8 @@ def notifications(self, notifications: typing.Literal["none", "email"]) -> None: @property @staging_check - def alerts(self) -> typing.Generator[str, None, None]: - for alert in self.get_alert_details(): - yield alert["id"] + def alerts(self) -> list[str]: + return [alert["id"] for alert in self.get_alert_details()] def get_alert_details(self) -> typing.Generator[dict[str, typing.Any], None, None]: """Retrieve the full details of alerts for this run""" diff --git a/simvue/run.py b/simvue/run.py index 63369751..12c006de 100644 --- a/simvue/run.py +++ b/simvue/run.py @@ -1645,7 +1645,7 @@ def add_alerts( return False # Avoid duplication - self._sv_obj.alerts = list(set(self._sv_obj.alerts + [ids])) + self._sv_obj.alerts = list(set(self._sv_obj.alerts + ids)) self._sv_obj.commit() return False From 607fb9f0d07969c2a65ba17f7cf922817a1cde9b Mon Sep 17 00:00:00 2001 From: Matt Field Date: Fri, 14 Feb 2025 09:34:20 +0000 Subject: [PATCH 23/56] Removed logger.setLevel from dispatch --- simvue/factory/dispatch/queued.py | 1 - 1 file changed, 1 deletion(-) diff --git a/simvue/factory/dispatch/queued.py b/simvue/factory/dispatch/queued.py index 6ebf5ff3..ae5b094e 100644 --- a/simvue/factory/dispatch/queued.py +++ b/simvue/factory/dispatch/queued.py @@ -21,7 +21,6 @@ QUEUE_SIZE = 10000 logger = logging.getLogger(__name__) -logger.setLevel(logging.DEBUG) class QueuedDispatcher(threading.Thread, DispatcherBaseClass): From 65dfac4d36f295520aeafa568f07db8723263e22 Mon Sep 17 00:00:00 2001 From: Matt Field Date: Fri, 14 Feb 2025 13:06:36 +0000 Subject: [PATCH 24/56] Added deepmerge to dependencies and extra tests --- poetry.lock | 20 +++++++++-- pyproject.toml | 1 + simvue/api/objects/base.py | 6 ++-- 
tests/functional/test_run_class.py | 53 ++++++++++++++++++++++++++---- 4 files changed, 69 insertions(+), 11 deletions(-) diff --git a/poetry.lock b/poetry.lock index aa295ce7..4371e2de 100644 --- a/poetry.lock +++ b/poetry.lock @@ -505,7 +505,6 @@ files = [ {file = "cryptography-44.0.0-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:761817a3377ef15ac23cd7834715081791d4ec77f9297ee694ca1ee9c2c7e5eb"}, {file = "cryptography-44.0.0-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:3c672a53c0fb4725a29c303be906d3c1fa99c32f58abe008a82705f9ee96f40b"}, {file = "cryptography-44.0.0-cp37-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:4ac4c9f37eba52cb6fbeaf5b59c152ea976726b865bd4cf87883a7e7006cc543"}, - {file = "cryptography-44.0.0-cp37-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:60eb32934076fa07e4316b7b2742fa52cbb190b42c2df2863dbc4230a0a9b385"}, {file = "cryptography-44.0.0-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:ed3534eb1090483c96178fcb0f8893719d96d5274dfde98aa6add34614e97c8e"}, {file = "cryptography-44.0.0-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:f3f6fdfa89ee2d9d496e2c087cebef9d4fcbb0ad63c40e821b39f74bf48d9c5e"}, {file = "cryptography-44.0.0-cp37-abi3-win32.whl", hash = "sha256:eb33480f1bad5b78233b0ad3e1b0be21e8ef1da745d8d2aecbb20671658b9053"}, @@ -516,7 +515,6 @@ files = [ {file = "cryptography-44.0.0-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:c5eb858beed7835e5ad1faba59e865109f3e52b3783b9ac21e7e47dc5554e289"}, {file = "cryptography-44.0.0-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:f53c2c87e0fb4b0c00fa9571082a057e37690a8f12233306161c8f4b819960b7"}, {file = "cryptography-44.0.0-cp39-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:9e6fc8a08e116fb7c7dd1f040074c9d7b51d74a8ea40d4df2fc7aa08b76b9e6c"}, - {file = "cryptography-44.0.0-cp39-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:9abcc2e083cbe8dde89124a47e5e53ec38751f0d7dfd36801008f316a127d7ba"}, {file = "cryptography-44.0.0-cp39-abi3-musllinux_1_2_aarch64.whl", 
hash = "sha256:d2436114e46b36d00f8b72ff57e598978b37399d2786fd39793c36c6d5cb1c64"}, {file = "cryptography-44.0.0-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:a01956ddfa0a6790d594f5b34fc1bfa6098aca434696a03cfdbe469b8ed79285"}, {file = "cryptography-44.0.0-cp39-abi3-win32.whl", hash = "sha256:eca27345e1214d1b9f9490d200f9db5a874479be914199194e746c893788d417"}, @@ -560,6 +558,22 @@ files = [ docs = ["ipython", "matplotlib", "numpydoc", "sphinx"] tests = ["pytest", "pytest-cov", "pytest-xdist"] +[[package]] +name = "deepmerge" +version = "2.0" +description = "A toolset for deeply merging Python dictionaries." +optional = false +python-versions = ">=3.8" +groups = ["main"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" +files = [ + {file = "deepmerge-2.0-py3-none-any.whl", hash = "sha256:6de9ce507115cff0bed95ff0ce9ecc31088ef50cbdf09bc90a09349a318b3d00"}, + {file = "deepmerge-2.0.tar.gz", hash = "sha256:5c3d86081fbebd04dd5de03626a0607b809a98fb6ccba5770b62466fe940ff20"}, +] + +[package.extras] +dev = ["black", "build", "mypy", "pytest", "pyupgrade", "twine", "validate-pyproject[all]"] + [[package]] name = "dnspython" version = "2.7.0" @@ -2636,4 +2650,4 @@ plot = ["matplotlib", "plotly"] [metadata] lock-version = "2.1" python-versions = ">=3.10,<3.14" -content-hash = "b87307deab6d125136242de2adc36049337970d6abea23392c9fdf57761230a6" +content-hash = "44571ffd2089db9c20200ad205318c25e0c419891b7bea5cef0e0fd4c918b7d3" diff --git a/pyproject.toml b/pyproject.toml index bbbfd633..194ba81f 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -53,6 +53,7 @@ dependencies = [ "psutil (>=6.1.1,<7.0.0)", "tenacity (>=9.0.0,<10.0.0)", "typing-extensions (>=4.12.2,<5.0.0) ; python_version < \"3.11\"", + "deepmerge (>=2.0,<3.0)", ] [project.urls] diff --git a/simvue/api/objects/base.py b/simvue/api/objects/base.py index 7f43904f..cc683757 100644 --- a/simvue/api/objects/base.py +++ b/simvue/api/objects/base.py @@ -16,6 +16,7 @@ import msgpack import pydantic 
+from deepmerge import always_merger from simvue.config.user import SimvueConfiguration from simvue.exception import ObjectNotFoundError @@ -524,9 +525,10 @@ def _cache(self) -> None: with self._local_staging_file.open() as in_f: _local_data = json.load(in_f) - _local_data |= self._staging + _cache_data = always_merger.merge(_local_data, self._staging) + with self._local_staging_file.open("w", encoding="utf-8") as out_f: - json.dump(_local_data, out_f, indent=2) + json.dump(_cache_data, out_f, indent=2) def to_dict(self) -> dict[str, typing.Any]: return self._get() | self._staging diff --git a/tests/functional/test_run_class.py b/tests/functional/test_run_class.py index 5de51a8a..34381c0f 100644 --- a/tests/functional/test_run_class.py +++ b/tests/functional/test_run_class.py @@ -220,9 +220,14 @@ def test_offline_tags(create_plain_run_offline: tuple[sv_run.Run, dict]) -> None @pytest.mark.run def test_update_metadata_running(create_test_run: tuple[sv_run.Run, dict]) -> None: - METADATA = {"a": 10, "b": 1.2, "c": "word"} + METADATA = {"a": 1, "b": 1.2, "c": "word", "d": "new"} run, _ = create_test_run - run.update_metadata(METADATA) + # Add an initial set of metadata + run.update_metadata({"a": 10, "b": 1.2, "c": "word"}) + # Try updating a second time, check original dict isnt overwritten + run.update_metadata({"d": "new"}) + # Try updating an already defined piece of metadata + run.update_metadata({"a": 1}) run.close() time.sleep(1.0) client = sv_cl.Client() @@ -234,9 +239,14 @@ def test_update_metadata_running(create_test_run: tuple[sv_run.Run, dict]) -> No @pytest.mark.run def test_update_metadata_created(create_pending_run: tuple[sv_run.Run, dict]) -> None: - METADATA = {"a": 10, "b": 1.2, "c": "word"} + METADATA = {"a": 1, "b": 1.2, "c": "word", "d": "new"} run, _ = create_pending_run - run.update_metadata(METADATA) + # Add an initial set of metadata + run.update_metadata({"a": 10, "b": 1.2, "c": "word"}) + # Try updating a second time, check original dict 
isnt overwritten + run.update_metadata({"d": "new"}) + # Try updating an already defined piece of metadata + run.update_metadata({"a": 1}) time.sleep(1.0) client = sv_cl.Client() run_info = client.get_run(run.id) @@ -250,13 +260,21 @@ def test_update_metadata_created(create_pending_run: tuple[sv_run.Run, dict]) -> def test_update_metadata_offline( create_plain_run_offline: tuple[sv_run.Run, dict], ) -> None: - METADATA = {"a": 10, "b": 1.2, "c": "word"} + METADATA = {"a": 1, "b": 1.2, "c": "word", "d": "new"} run, _ = create_plain_run_offline run_name = run._name - run.update_metadata(METADATA) + # Add an initial set of metadata + run.update_metadata({"a": 10, "b": 1.2, "c": "word"}) + # Try updating a second time, check original dict isnt overwritten + run.update_metadata({"d": "new"}) + # Try updating an already defined piece of metadata + run.update_metadata({"a": 1}) + sv_send.sender(os.environ["SIMVUE_OFFLINE_DIRECTORY"], 2, 10) + import pdb; pdb.set_trace() run.close() time.sleep(1.0) + import pdb; pdb.set_trace() client = sv_cl.Client() run_info = client.get_run(client.get_run_id_from_name(run_name)) @@ -655,6 +673,29 @@ def test_update_tags_created( assert sorted(run_data.tags) == sorted(tags + ["additional"]) +@pytest.mark.offline +@pytest.mark.run +def test_update_tags_offline( + create_plain_run_offline: typing.Tuple[sv_run.Run, dict], +) -> None: + simvue_run, _ = create_plain_run_offline + run_name = simvue_run._name + + simvue_run.set_tags(["simvue_client_unit_tests",]) + + simvue_run.update_tags(["additional"]) + + sv_send.sender(os.environ["SIMVUE_OFFLINE_DIRECTORY"], 2, 10) + simvue_run.close() + time.sleep(1.0) + + client = sv_cl.Client() + run_data = client.get_run(client.get_run_id_from_name(run_name)) + + time.sleep(1) + run_data = client.get_run(simvue_run._id) + assert sorted(run_data.tags) == sorted(["simvue_client_unit_tests", "additional"]) + @pytest.mark.run @pytest.mark.parametrize("object_type", ("DataFrame", "ndarray")) def 
test_save_object( From 683c6f45572a8f95e618fcb00ac561b19d15df0f Mon Sep 17 00:00:00 2001 From: Matt Field Date: Fri, 14 Feb 2025 13:09:21 +0000 Subject: [PATCH 25/56] Remove attach_to_run as it isnt required, replace with add_alerts --- simvue/run.py | 14 ++++---------- 1 file changed, 4 insertions(+), 10 deletions(-) diff --git a/simvue/run.py b/simvue/run.py index 12c006de..5f31ba21 100644 --- a/simvue/run.py +++ b/simvue/run.py @@ -29,7 +29,6 @@ import click import psutil -from simvue.api.objects.alert.base import AlertBase from simvue.api.objects.alert.fetch import Alert from simvue.api.objects.folder import Folder, get_folder_from_path from simvue.exception import ObjectNotFoundError, SimvueRunError @@ -1650,11 +1649,6 @@ def add_alerts( return False - @check_run_initialised - def _attach_alert_to_run(self, alert: AlertBase) -> str | None: - self._sv_obj.alerts = [alert.id] - self._sv_obj.commit() - @skip_if_failed("_aborted", "_suppress_errors", None) @pydantic.validate_call def create_metric_range_alert( @@ -1728,7 +1722,7 @@ def create_metric_range_alert( _alert.abort = trigger_abort _alert.commit() if attach_to_run: - self._attach_alert_to_run(_alert) + self.add_alerts(ids=[_alert.id]) return _alert.id @skip_if_failed("_aborted", "_suppress_errors", None) @@ -1801,7 +1795,7 @@ def create_metric_threshold_alert( _alert.abort = trigger_abort _alert.commit() if attach_to_run: - self._attach_alert_to_run(_alert) + self.add_alerts(ids=[_alert.id]) return _alert.id @skip_if_failed("_aborted", "_suppress_errors", None) @@ -1853,7 +1847,7 @@ def create_event_alert( _alert.abort = trigger_abort _alert.commit() if attach_to_run: - self._attach_alert_to_run(_alert) + self.add_alerts(ids=[_alert.id]) return _alert.id @skip_if_failed("_aborted", "_suppress_errors", None) @@ -1899,7 +1893,7 @@ def create_user_alert( _alert.abort = trigger_abort _alert.commit() if attach_to_run: - self._attach_alert_to_run(_alert) + self.add_alerts(ids=[_alert.id]) return _alert.id 
@skip_if_failed("_aborted", "_suppress_errors", False) From 58a623b61621d6cfac4a25f529110c55dfaad8fd Mon Sep 17 00:00:00 2001 From: Matt Field Date: Fri, 14 Feb 2025 13:33:00 +0000 Subject: [PATCH 26/56] Added custom merge strategy to override lists --- simvue/api/objects/base.py | 6 +++--- simvue/utilities.py | 17 ++++++++++++++++- tests/functional/test_run_class.py | 5 ++--- 3 files changed, 21 insertions(+), 7 deletions(-) diff --git a/simvue/api/objects/base.py b/simvue/api/objects/base.py index cc683757..b75c8e6e 100644 --- a/simvue/api/objects/base.py +++ b/simvue/api/objects/base.py @@ -16,8 +16,8 @@ import msgpack import pydantic -from deepmerge import always_merger +from simvue.utilities import staging_merger from simvue.config.user import SimvueConfiguration from simvue.exception import ObjectNotFoundError from simvue.version import __version__ @@ -525,10 +525,10 @@ def _cache(self) -> None: with self._local_staging_file.open() as in_f: _local_data = json.load(in_f) - _cache_data = always_merger.merge(_local_data, self._staging) + staging_merger.merge(_local_data, self._staging) with self._local_staging_file.open("w", encoding="utf-8") as out_f: - json.dump(_cache_data, out_f, indent=2) + json.dump(_local_data, out_f, indent=2) def to_dict(self) -> dict[str, typing.Any]: return self._get() | self._staging diff --git a/simvue/utilities.py b/simvue/utilities.py index 69a8ecd8..fe7746cb 100644 --- a/simvue/utilities.py +++ b/simvue/utilities.py @@ -11,8 +11,8 @@ import os import pathlib import typing - import jwt +from deepmerge import Merger from datetime import timezone from simvue.models import DATETIME_FORMAT @@ -395,3 +395,18 @@ def get_mimetype_for_file(file_path: pathlib.Path) -> str: """Return MIME type for the given file""" _guess, *_ = mimetypes.guess_type(file_path) return _guess or "application/octet-stream" + + +# Create a new Merge strategy for merging local file and staging attributes +staging_merger = Merger( + # pass in a list of tuple, with 
the + # strategies you are looking to apply + # to each type. + [(list, ["override"]), (dict, ["merge"]), (set, ["union"])], + # next, choose the fallback strategies, + # applied to all other types: + ["override"], + # finally, choose the strategies in + # the case where the types conflict: + ["override"], +) diff --git a/tests/functional/test_run_class.py b/tests/functional/test_run_class.py index 34381c0f..4352e97e 100644 --- a/tests/functional/test_run_class.py +++ b/tests/functional/test_run_class.py @@ -269,12 +269,11 @@ def test_update_metadata_offline( run.update_metadata({"d": "new"}) # Try updating an already defined piece of metadata run.update_metadata({"a": 1}) - + sv_send.sender(os.environ["SIMVUE_OFFLINE_DIRECTORY"], 2, 10) - import pdb; pdb.set_trace() run.close() time.sleep(1.0) - import pdb; pdb.set_trace() + client = sv_cl.Client() run_info = client.get_run(client.get_run_id_from_name(run_name)) From e4ac228df42f2de3a4eea8a87e7ed747a7123a3c Mon Sep 17 00:00:00 2001 From: Matt Field Date: Fri, 14 Feb 2025 13:46:10 +0000 Subject: [PATCH 27/56] Add initial sv_obj.alert as an empty list in init --- simvue/run.py | 1 + 1 file changed, 1 insertion(+) diff --git a/simvue/run.py b/simvue/run.py index 5f31ba21..8e821886 100644 --- a/simvue/run.py +++ b/simvue/run.py @@ -685,6 +685,7 @@ def init( self._sv_obj.tags = tags self._sv_obj.metadata = (metadata or {}) | git_info(os.getcwd()) | environment() self._sv_obj.heartbeat_timeout = timeout + self._sv_obj.alerts = [] if self._status == "running": self._sv_obj.system = get_system() From 030f85474b78476a2d6264acfd41490d17e565dc Mon Sep 17 00:00:00 2001 From: Matt Field Date: Fri, 14 Feb 2025 16:29:33 +0000 Subject: [PATCH 28/56] Fixed alert retrieval in offline --- simvue/api/objects/run.py | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/simvue/api/objects/run.py b/simvue/api/objects/run.py index cbc470a3..60cad198 100644 --- a/simvue/api/objects/run.py +++ b/simvue/api/objects/run.py @@ -226,10 
+226,17 @@ def notifications(self, notifications: typing.Literal["none", "email"]) -> None: @property @staging_check def alerts(self) -> list[str]: + if self._offline: + return self._get_attribute("alerts") + return [alert["id"] for alert in self.get_alert_details()] def get_alert_details(self) -> typing.Generator[dict[str, typing.Any], None, None]: """Retrieve the full details of alerts for this run""" + if self._offline: + raise RuntimeError( + "Cannot get alert details from an offline run - use .alerts to access a list of IDs instead" + ) for alert in self._get_attribute("alerts"): yield alert["alert"] From 51fb0a47f7d14b2a19c97e29853ce715236cdc96 Mon Sep 17 00:00:00 2001 From: Matt Field Date: Fri, 14 Feb 2025 17:00:17 +0000 Subject: [PATCH 29/56] Fixing add alerts wip --- simvue/run.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/simvue/run.py b/simvue/run.py index 8e821886..db260469 100644 --- a/simvue/run.py +++ b/simvue/run.py @@ -1645,10 +1645,11 @@ def add_alerts( return False # Avoid duplication - self._sv_obj.alerts = list(set(self._sv_obj.alerts + ids)) + _deduplicated = list(set(self._sv_obj.alerts + ids)) + self._sv_obj.alerts = _deduplicated self._sv_obj.commit() - return False + return True @skip_if_failed("_aborted", "_suppress_errors", None) @pydantic.validate_call From 64075448850184dd8c2fb8e7f8127aee5883f486 Mon Sep 17 00:00:00 2001 From: Matt Field Date: Fri, 14 Feb 2025 17:13:52 +0000 Subject: [PATCH 30/56] Still fixing add_alert --- simvue/run.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/simvue/run.py b/simvue/run.py index db260469..1c495e12 100644 --- a/simvue/run.py +++ b/simvue/run.py @@ -1632,14 +1632,14 @@ def add_alerts( try: if alerts := Alert.get(offline=self._user_config.run.mode == "offline"): for alert in alerts: - if alert.name in names: - ids.append(alert.id) + if alert[1].name in names: + ids.append(alert[1].id) + else: + self._error("No existing alerts") + return 
False except RuntimeError as e: self._error(f"{e.args[0]}") return False - else: - self._error("No existing alerts") - return False elif not names and not ids: self._error("Need to provide alert ids or alert names") return False From 475cb92a8f30052c604283fc66eecd1e7419f4f4 Mon Sep 17 00:00:00 2001 From: Matt Field Date: Mon, 17 Feb 2025 14:06:33 +0000 Subject: [PATCH 31/56] Create add_alerts test --- tests/functional/test_run_class.py | 88 ++++++++++++++++++++++++++++++ 1 file changed, 88 insertions(+) diff --git a/tests/functional/test_run_class.py b/tests/functional/test_run_class.py index 5de51a8a..0e9d26ab 100644 --- a/tests/functional/test_run_class.py +++ b/tests/functional/test_run_class.py @@ -676,6 +676,94 @@ def test_save_object( save_obj = array([1, 2, 3, 4]) simvue_run.save_object(save_obj, "input", f"test_object_{object_type}") +@pytest.mark.run +def test_add_alerts() -> None: + _uuid = f"{uuid.uuid4()}".split("-")[0] + + run = sv_run.Run() + run.init( + name="test_add_alerts", + folder="/simvue_unit_tests", + retention_period="1 min", + tags=["test_add_alerts"], + visibility="tenant" + ) + + _expected_alerts = [] + + # Create alerts, have them attach to run automatically + _id = run.create_event_alert( + name=f"event_alert_{_uuid}", + pattern = "test", + ) + _expected_alerts.append(_id) + time.sleep(1) + # Retrieve run, check if alert has been added + _online_run = RunObject(identifier=run._id) + assert _id in _online_run.alerts + + # Create another alert and attach to run + _id = run.create_metric_range_alert( + name=f"metric_range_alert_{_uuid}", + metric="test", + range_low=10, + range_high=100, + rule="is inside range", + ) + _expected_alerts.append(_id) + time.sleep(1) + # Retrieve run, check both alerts have been added + _online_run.refresh() + assert sorted(_online_run.alerts) == sorted(_expected_alerts) + + # Create another alert, do not attach to run + _id = run.create_metric_threshold_alert( + name=f"metric_threshold_alert_{_uuid}", + 
metric="test", + threshold=10, + rule="is above", + attach_to_run=False + ) + time.sleep(1) + # Retrieve run, check alert has NOT been added + _online_run.refresh() + assert sorted(_online_run.alerts) == sorted(_expected_alerts) + + # Try adding all three alerts using add_alerts + _expected_alerts.append(_id) + run.add_alerts(names=[f"event_alert_{_uuid}", f"metric_range_alert_{_uuid}", f"metric_threshold_alert_{_uuid}"]) + time.sleep(1) + + # Check that there is no duplication + _online_run.refresh() + assert sorted(_online_run.alerts) == sorted(_expected_alerts) + + # Create another run without adding to run + _id = run.create_user_alert( + name=f"user_alert_{_uuid}", + attach_to_run=False + ) + time.sleep(1) + + # Check alert is not added + _online_run.refresh() + assert sorted(_online_run.alerts) == sorted(_expected_alerts) + + # Try adding alerts with IDs, check there is no duplication + _expected_alerts.append(_id) + run.add_alerts(ids=_expected_alerts) + time.sleep(1) + + _online_run.refresh() + assert sorted(_online_run.alerts) == sorted(_expected_alerts) + + run.close() + + client = sv_cl.Client() + client.delete_run(run._id) + for _id in _expected_alerts: + client.delete_alert(_id) + @pytest.mark.run def test_abort_on_alert_process(mocker: pytest_mock.MockerFixture) -> None: From f884daa86cd87547a37fe34144ab50e421a567e2 Mon Sep 17 00:00:00 2001 From: Matt Field Date: Mon, 17 Feb 2025 14:08:28 +0000 Subject: [PATCH 32/56] Moved get status to under sleep in test --- tests/functional/test_run_class.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/functional/test_run_class.py b/tests/functional/test_run_class.py index 0e9d26ab..7bb897b4 100644 --- a/tests/functional/test_run_class.py +++ b/tests/functional/test_run_class.py @@ -844,8 +844,8 @@ def testing_exit(status: int) -> None: run.add_process(identifier="forever_long", executable="bash", c="sleep 10") time.sleep(2) run.log_alert(alert_id, "critical") - _alert = 
Alert(identifier=alert_id) time.sleep(1) + _alert = Alert(identifier=alert_id) assert _alert.get_status(run.id) == "critical" counter = 0 while run._status != "terminated" and counter < 15: From 28b84b23047744960f1e03b2e978eba7894cd109 Mon Sep 17 00:00:00 2001 From: Matt Field Date: Mon, 17 Feb 2025 14:21:56 +0000 Subject: [PATCH 33/56] =?UTF-8?q?Remove=20artifact=20changes=20which=20sho?= =?UTF-8?q?uldnt=20be=20in=20this=20PR=C2=A3?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- simvue/api/objects/artifact/fetch.py | 76 ++++++---------------------- 1 file changed, 16 insertions(+), 60 deletions(-) diff --git a/simvue/api/objects/artifact/fetch.py b/simvue/api/objects/artifact/fetch.py index 1d571266..88f582ce 100644 --- a/simvue/api/objects/artifact/fetch.py +++ b/simvue/api/objects/artifact/fetch.py @@ -23,62 +23,6 @@ def __new__(cls, identifier: str | None = None, **kwargs): else: return ObjectArtifact(identifier=identifier, **kwargs) - @classmethod - def from_run( - cls, - run_id: str, - category: typing.Literal["input", "output", "code"] | None = None, - **kwargs, - ) -> typing.Generator[tuple[str, FileArtifact | ObjectArtifact], None, None]: - """Return artifacts associated with a given run. 
- - Parameters - ---------- - run_id : str - The ID of the run to retriece artifacts from - category : typing.Literal["input", "output", "code"] | None, optional - The category of artifacts to return, by default all artifacts are returned - - Returns - ------- - typing.Generator[tuple[str, FileArtifact | ObjectArtifact], None, None] - The artifacts - - Yields - ------ - Iterator[typing.Generator[tuple[str, FileArtifact | ObjectArtifact], None, None]] - identifier for artifact - the artifact itself as a class instance - - Raises - ------ - ObjectNotFoundError - Raised if artifacts could not be found for that run - """ - _temp = ArtifactBase(**kwargs) - _url = URL(_temp._user_config.server.url) / f"runs/{run_id}/artifacts" - _response = sv_get( - url=f"{_url}", params={"category": category}, headers=_temp._headers - ) - _json_response = get_json_from_response( - expected_type=list, - response=_response, - expected_status=[http.HTTPStatus.OK, http.HTTPStatus.NOT_FOUND], - scenario=f"Retrieval of artifacts for run '{run_id}'", - ) - - if _response.status_code == http.HTTPStatus.NOT_FOUND or not _json_response: - raise ObjectNotFoundError( - _temp._label, category, extra=f"for run '{run_id}'" - ) - - for _entry in _json_response: - _id = _entry.pop("id") - yield ( - _id, - Artifact(_local=True, _read_only=True, identifier=_id, **_entry), - ) - @classmethod def from_name( cls, run_id: str, name: str, **kwargs @@ -155,9 +99,21 @@ def get( if (_data := _json_response.get("data")) is None: raise RuntimeError(f"Expected key 'data' for retrieval of {_label}s") + _out_dict: dict[str, FileArtifact | ObjectArtifact] = {} + for _entry in _data: _id = _entry.pop("id") - yield ( - _id, - Artifact(_local=True, _read_only=True, identifier=_id, **_entry), - ) + if _entry["original_path"]: + yield ( + _id, + FileArtifact( + _local=True, _read_only=True, identifier=_id, **_entry + ), + ) + else: + yield ( + _id, + ObjectArtifact( + _local=True, _read_only=True, identifier=_id, **_entry 
+ ), + ) From f1a1e69b3581271719deb6622d615c7c814737d5 Mon Sep 17 00:00:00 2001 From: Matt Field Date: Mon, 17 Feb 2025 14:54:10 +0000 Subject: [PATCH 34/56] Fix alerts setter --- simvue/api/objects/run.py | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/simvue/api/objects/run.py b/simvue/api/objects/run.py index 60cad198..371cf29c 100644 --- a/simvue/api/objects/run.py +++ b/simvue/api/objects/run.py @@ -244,9 +244,7 @@ def get_alert_details(self) -> typing.Generator[dict[str, typing.Any], None, Non @write_only @pydantic.validate_call def alerts(self, alerts: list[str]) -> None: - self._staging["alerts"] = [ - alert for alert in alerts if alert not in self._staging.get("alerts", []) - ] + self._staging["alerts"] = list(set(self._staging.get("alerts", []) + alerts)) @property @staging_check From 8e30ffc4780425fdc14ade71de2d3843f476a113 Mon Sep 17 00:00:00 2001 From: Matt Field Date: Mon, 17 Feb 2025 14:54:10 +0000 Subject: [PATCH 35/56] Fix alerts setter --- simvue/api/objects/run.py | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/simvue/api/objects/run.py b/simvue/api/objects/run.py index 3801f26c..f8d75c1e 100644 --- a/simvue/api/objects/run.py +++ b/simvue/api/objects/run.py @@ -245,9 +245,7 @@ def get_alert_details(self) -> typing.Generator[dict[str, typing.Any], None, Non @write_only @pydantic.validate_call def alerts(self, alerts: list[str]) -> None: - self._staging["alerts"] = [ - alert for alert in alerts if alert not in self._staging.get("alerts", []) - ] + self._staging["alerts"] = list(set(self._staging.get("alerts", []) + alerts)) @property @staging_check From ad2f13d9b08497eb15e13410f679642b914e6d72 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Mon, 17 Feb 2025 16:43:42 +0000 Subject: [PATCH 36/56] [pre-commit.ci] pre-commit autoupdate MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit updates: - 
[github.com/PyCQA/bandit.git: 1.8.2 → 1.8.3](https://github.com/PyCQA/bandit.git/compare/1.8.2...1.8.3) --- .pre-commit-config.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 49f60d2a..09520ac8 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -35,7 +35,7 @@ repos: pass_filenames: false - repo: https://github.com/PyCQA/bandit.git - rev: 1.8.2 + rev: 1.8.3 hooks: - id: bandit args: [-lll, --recursive, clumper] From 777a4432b3041b56a9f715d2dbed96147b122bb7 Mon Sep 17 00:00:00 2001 From: Matt Field Date: Mon, 17 Feb 2025 18:06:14 +0000 Subject: [PATCH 37/56] Remove existing folder check and rely on 409 from server --- simvue/api/objects/base.py | 2 +- simvue/run.py | 15 ++++++--------- 2 files changed, 7 insertions(+), 10 deletions(-) diff --git a/simvue/api/objects/base.py b/simvue/api/objects/base.py index 7f43904f..16f2af13 100644 --- a/simvue/api/objects/base.py +++ b/simvue/api/objects/base.py @@ -423,7 +423,7 @@ def _post(self, is_json: bool = True, **kwargs) -> dict[str, typing.Any]: _json_response = get_json_from_response( response=_response, - expected_status=[http.HTTPStatus.OK], + expected_status=[http.HTTPStatus.OK, http.HTTPStatus.CONFLICT], scenario=f"Creation of {self._label}", ) diff --git a/simvue/run.py b/simvue/run.py index af7a5aa0..1ae01f28 100644 --- a/simvue/run.py +++ b/simvue/run.py @@ -31,8 +31,8 @@ from simvue.api.objects.alert.base import AlertBase from simvue.api.objects.alert.fetch import Alert -from simvue.api.objects.folder import Folder, get_folder_from_path -from simvue.exception import ObjectNotFoundError, SimvueRunError +from simvue.api.objects.folder import Folder +from simvue.exception import SimvueRunError from simvue.utilities import prettify_pydantic @@ -621,13 +621,10 @@ def init( self._term_color = not no_color - try: - self._folder = get_folder_from_path(path=folder) - except ObjectNotFoundError: - self._folder = Folder.new( - 
path=folder, offline=self._user_config.run.mode == "offline" - ) - self._folder.commit() # type: ignore + self._folder = Folder.new( + path=folder, offline=self._user_config.run.mode == "offline" + ) + self._folder.commit() # type: ignore if isinstance(visibility, str) and visibility not in ("public", "tenant"): self._error( From cb0221358629ff4edd2c0732756139cc686e74c5 Mon Sep 17 00:00:00 2001 From: Matt Field Date: Tue, 18 Feb 2025 12:15:04 +0000 Subject: [PATCH 38/56] Add randomname generator if run mode is offline --- simvue/run.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/simvue/run.py b/simvue/run.py index 954d1743..58fafe97 100644 --- a/simvue/run.py +++ b/simvue/run.py @@ -25,7 +25,7 @@ import typing import warnings import uuid - +import randomname import click import psutil @@ -647,6 +647,8 @@ def init( if name and not re.match(r"^[a-zA-Z0-9\-\_\s\/\.:]+$", name): self._error("specified name is invalid") return False + elif not name and self._user_config.run.mode != "online": + name = randomname.get_name() self._name = name From 8912d8ac7c41c8297834fd6ac719c906680008d1 Mon Sep 17 00:00:00 2001 From: Matt Field Date: Tue, 18 Feb 2025 12:15:31 +0000 Subject: [PATCH 39/56] Add randomname generator if run mode is offline --- simvue/run.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/simvue/run.py b/simvue/run.py index 58fafe97..d02709f2 100644 --- a/simvue/run.py +++ b/simvue/run.py @@ -647,7 +647,7 @@ def init( if name and not re.match(r"^[a-zA-Z0-9\-\_\s\/\.:]+$", name): self._error("specified name is invalid") return False - elif not name and self._user_config.run.mode != "online": + elif not name and self._user_config.run.mode == "offline": name = randomname.get_name() self._name = name From 6d77bde6bebe8606e48c111fed16d4e16710f6ef Mon Sep 17 00:00:00 2001 From: Matt Field Date: Tue, 18 Feb 2025 13:02:39 +0000 Subject: [PATCH 40/56] Removed server url and token validation checks if mode is offline --- 
simvue/api/objects/base.py | 12 ++++++++---- simvue/config/parameters.py | 28 ++++++++++++++++------------ simvue/config/user.py | 4 ++-- simvue/run.py | 18 +++++++++++++----- 4 files changed, 39 insertions(+), 23 deletions(-) diff --git a/simvue/api/objects/base.py b/simvue/api/objects/base.py index 16f2af13..334af761 100644 --- a/simvue/api/objects/base.py +++ b/simvue/api/objects/base.py @@ -164,10 +164,14 @@ def __init__( ) ) - self._headers: dict[str, str] = { - "Authorization": f"Bearer {self._user_config.server.token.get_secret_value()}", - "User-Agent": _user_agent or f"Simvue Python client {__version__}", - } + self._headers: dict[str, str] = ( + { + "Authorization": f"Bearer {self._user_config.server.token.get_secret_value()}", + "User-Agent": _user_agent or f"Simvue Python client {__version__}", + } + if not self._offline + else {} + ) self._staging: dict[str, typing.Any] = {} diff --git a/simvue/config/parameters.py b/simvue/config/parameters.py index 472811f6..eb26e523 100644 --- a/simvue/config/parameters.py +++ b/simvue/config/parameters.py @@ -21,25 +21,29 @@ class ServerSpecifications(pydantic.BaseModel): - url: pydantic.AnyHttpUrl - token: pydantic.SecretStr + url: pydantic.AnyHttpUrl | None + token: pydantic.SecretStr | None @pydantic.field_validator("url") @classmethod def url_to_api_url(cls, v: typing.Any) -> str: - if f"{v}".endswith("/api"): - return f"{v}" - _url = URL(f"{v}") / "api" - return f"{_url}" + if v: + if f"{v}".endswith("/api"): + return f"{v}" + _url = URL(f"{v}") / "api" + return f"{_url}" @pydantic.field_validator("token") def check_token(cls, v: typing.Any) -> str: - value = v.get_secret_value() - if not (expiry := get_expiry(value)): - raise AssertionError("Failed to parse Simvue token - invalid token form") - if time.time() - expiry > 0: - raise AssertionError("Simvue token has expired") - return v + if v: + value = v.get_secret_value() + if not (expiry := get_expiry(value)): + raise AssertionError( + "Failed to parse 
Simvue token - invalid token form" + ) + if time.time() - expiry > 0: + raise AssertionError("Simvue token has expired") + return v class OfflineSpecifications(pydantic.BaseModel): diff --git a/simvue/config/user.py b/simvue/config/user.py index 931d2fa5..22357ce5 100644 --- a/simvue/config/user.py +++ b/simvue/config/user.py @@ -212,10 +212,10 @@ def fetch( _run_mode = mode or _config_dict["run"].get("mode") or "online" - if not _server_url: + if not _server_url and _run_mode != "offline": raise RuntimeError("No server URL was specified") - if not _server_token: + if not _server_token and _run_mode != "offline": raise RuntimeError("No server token was specified") _config_dict["server"]["token"] = _server_token diff --git a/simvue/run.py b/simvue/run.py index 1ae01f28..a618f04c 100644 --- a/simvue/run.py +++ b/simvue/run.py @@ -185,9 +185,13 @@ def __init__( if self._user_config.metrics.resources_metrics_interval < 1 else self._user_config.metrics.resources_metrics_interval ) - self._headers: dict[str, str] = { - "Authorization": f"Bearer {self._user_config.server.token.get_secret_value()}" - } + self._headers: dict[str, str] = ( + { + "Authorization": f"Bearer {self._user_config.server.token.get_secret_value()}" + } + if mode != "offline" + else {} + ) self._sv_obj: RunObject | None = None self._pid: int | None = 0 self._shutdown_event: threading.Event | None = None @@ -419,7 +423,9 @@ def _create_dispatch_callback( if self._user_config.run.mode == "online" and not self._id: raise RuntimeError("Expected identifier for run") - if not self._user_config.server.url or not self._sv_obj: + if ( + self._user_config.run.mode != "offline" and not self._user_config.server.url + ) or not self._sv_obj: raise RuntimeError("Cannot commence dispatch, run not initialised") def _dispatch_callback( @@ -635,7 +641,9 @@ def init( self._error("invalid mode specified, must be online, offline or disabled") return False - if not self._user_config.server.token or not 
self._user_config.server.url: + if self._user_config.run.mode != "offline" and ( + not self._user_config.server.token or not self._user_config.server.url + ): self._error( "Unable to get URL and token from environment variables or config file" ) From e916238fb20a2edac1a01b3d27842de7622f7d24 Mon Sep 17 00:00:00 2001 From: Matt Field Date: Tue, 18 Feb 2025 15:05:31 +0000 Subject: [PATCH 41/56] Added created to simvue run --- simvue/api/objects/run.py | 6 ++++++ simvue/run.py | 1 + 2 files changed, 7 insertions(+) diff --git a/simvue/api/objects/run.py b/simvue/api/objects/run.py index f8d75c1e..11328341 100644 --- a/simvue/api/objects/run.py +++ b/simvue/api/objects/run.py @@ -256,6 +256,12 @@ def created(self) -> datetime.datetime | None: datetime.datetime.strptime(_created, DATETIME_FORMAT) if _created else None ) + @created.setter + @write_only + @pydantic.validate_call + def created(self, created: datetime.datetime) -> None: + self._staging["created"] = created.strftime(DATETIME_FORMAT) + @property @staging_check def runtime(self) -> datetime.datetime | None: diff --git a/simvue/run.py b/simvue/run.py index d02709f2..2ee70686 100644 --- a/simvue/run.py +++ b/simvue/run.py @@ -689,6 +689,7 @@ def init( self._sv_obj.metadata = (metadata or {}) | git_info(os.getcwd()) | environment() self._sv_obj.heartbeat_timeout = timeout self._sv_obj.alerts = [] + self._sv_obj.created = time.time() if self._status == "running": self._sv_obj.system = get_system() From 7a82c8766b638f83f365b90ce2bd87589024f4a5 Mon Sep 17 00:00:00 2001 From: Matt Field Date: Tue, 18 Feb 2025 15:20:13 +0000 Subject: [PATCH 42/56] Removed api from url printed to screen on run start --- simvue/run.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/simvue/run.py b/simvue/run.py index 2ee70686..9b015ba6 100644 --- a/simvue/run.py +++ b/simvue/run.py @@ -718,7 +718,7 @@ def init( fg="green" if self._term_color else None, ) click.secho( - f"[simvue] Monitor in the UI at 
{self._user_config.server.url}/dashboard/runs/run/{self._id}", + f"[simvue] Monitor in the UI at {self._user_config.server.url.rsplit('/api', 1)[0]}/dashboard/runs/run/{self._id}", bold=self._term_color, fg="green" if self._term_color else None, ) From 8f53b2b71ed5b2e1294bd2cd82955ea797c10c61 Mon Sep 17 00:00:00 2001 From: Matt Field Date: Tue, 18 Feb 2025 16:45:15 +0000 Subject: [PATCH 43/56] Suppressed runtime error in log_event for tracebacks --- simvue/run.py | 11 ++--------- 1 file changed, 2 insertions(+), 9 deletions(-) diff --git a/simvue/run.py b/simvue/run.py index 9b015ba6..484a8299 100644 --- a/simvue/run.py +++ b/simvue/run.py @@ -248,19 +248,12 @@ def _handle_exception_throw( else f"An exception was thrown: {_exception_thrown}" ) - self.log_event(_event_msg) - self.set_status("terminated" if _is_terminated else "failed") - # If the dispatcher has already been aborted then this will # fail so just continue without the event with contextlib.suppress(RuntimeError): - self.log_event(f"{_exception_thrown}: {value}") - - if not traceback: - return + self.log_event(_event_msg) - with contextlib.suppress(RuntimeError): - self.log_event(f"Traceback: {traceback}") + self.set_status("terminated" if _is_terminated else "failed") def __exit__( self, From 7f486575c358082b0e2ffac8f8b1a0ce28054399 Mon Sep 17 00:00:00 2001 From: Matt Field Date: Tue, 18 Feb 2025 17:05:25 +0000 Subject: [PATCH 44/56] Fixed toml file finder --- simvue/utilities.py | 11 ++++++----- 1 file changed, 6 insertions(+), 5 deletions(-) diff --git a/simvue/utilities.py b/simvue/utilities.py index 69a8ecd8..d5fca8ba 100644 --- a/simvue/utilities.py +++ b/simvue/utilities.py @@ -50,16 +50,17 @@ def find_first_instance_of_file( if isinstance(file_names, str): file_names = [file_names] - for root, _, files in os.walk(os.getcwd(), topdown=False): - for file_name in file_names: - if file_name in files: - return pathlib.Path(root).joinpath(file_name) + for file_name in file_names: + _user_file = 
pathlib.Path.cwd().joinpath(file_name) + if _user_file.exists(): + return _user_file # If the user is running on different mounted volume or outside # of their user space then the above will not return the file if check_user_space: for file_name in file_names: - if os.path.exists(_user_file := pathlib.Path.home().joinpath(file_name)): + _user_file = pathlib.Path.home().joinpath(file_name) + if _user_file.exists(): return _user_file return None From 1a28c7639ce23080c10eaa37dce2d9f20569aa62 Mon Sep 17 00:00:00 2001 From: Matt Field Date: Wed, 19 Feb 2025 11:15:00 +0000 Subject: [PATCH 45/56] Parameterized log_metrics tests to include timestamp --- tests/functional/test_run_class.py | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/tests/functional/test_run_class.py b/tests/functional/test_run_class.py index 84807e54..b447659b 100644 --- a/tests/functional/test_run_class.py +++ b/tests/functional/test_run_class.py @@ -14,7 +14,7 @@ import pathlib import concurrent.futures import random - +import datetime import simvue from simvue.api.objects.alert.fetch import Alert from simvue.exception import SimvueRunError @@ -59,12 +59,14 @@ def test_run_with_emissions() -> None: @pytest.mark.run +@pytest.mark.parametrize("timestamp", (datetime.datetime.now().strftime("%Y-%m-%dT%H:%M:%S.%f"), None), ids=("timestamp", "no_timestamp")) @pytest.mark.parametrize("overload_buffer", (True, False), ids=("overload", "normal")) @pytest.mark.parametrize( "visibility", ("bad_option", "tenant", "public", ["ciuser01"], None) ) def test_log_metrics( overload_buffer: bool, + timestamp: str | None, setup_logging: "CountingLogHandler", mocker, request: pytest.FixtureRequest, @@ -112,9 +114,9 @@ def test_log_metrics( if overload_buffer: for i in range(run._dispatcher._max_buffer_size * 3): - run.log_metrics({key: i for key in METRICS}) + run.log_metrics({key: i for key in METRICS}, timestamp=timestamp) else: - run.log_metrics(METRICS) + run.log_metrics(METRICS, 
timestamp=timestamp) time.sleep(2.0 if overload_buffer else 1.0) run.close() client = sv_cl.Client() From 56cead2b6a41434c72a15aebf57bdf0cc10c7789 Mon Sep 17 00:00:00 2001 From: Matt Field Date: Wed, 19 Feb 2025 11:32:55 +0000 Subject: [PATCH 46/56] Correct format of notifications getter and setter --- simvue/api/objects/run.py | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/simvue/api/objects/run.py b/simvue/api/objects/run.py index 9841ded5..dfb21c1f 100644 --- a/simvue/api/objects/run.py +++ b/simvue/api/objects/run.py @@ -214,14 +214,16 @@ def heartbeat_timeout(self, time_seconds: int | None) -> None: @property @staging_check - def notifications(self) -> typing.Literal["none", "email"]: - return self._get_attribute("notifications") + def notifications(self) -> typing.Literal["none", "all", "error", "lost"]: + return self._get_attribute("notifications")["state"] @notifications.setter @write_only @pydantic.validate_call - def notifications(self, notifications: typing.Literal["none", "email"]) -> None: - self._staging["notifications"] = notifications + def notifications( + self, notifications: typing.Literal["none", "all", "error", "lost"] + ) -> None: + self._staging["notifications"] = {"state": notifications} @property @staging_check From 01c20caf1059acd0b59a7147b014e5cc4ff6ec05 Mon Sep 17 00:00:00 2001 From: Matt Field Date: Wed, 19 Feb 2025 11:53:21 +0000 Subject: [PATCH 47/56] Add notification option to run init --- simvue/run.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/simvue/run.py b/simvue/run.py index af7a5aa0..027e5e71 100644 --- a/simvue/run.py +++ b/simvue/run.py @@ -565,6 +565,7 @@ def init( folder: typing.Annotated[ str, pydantic.Field(None, pattern=FOLDER_REGEX) ] = None, + notification: typing.Literal["none", "all", "error", "lost"] = "none", running: bool = True, retention_period: str | None = None, timeout: int | None = 180, @@ -586,6 +587,9 @@ def init( description of the run, by default None folder : 
str, optional folder within which to store the run, by default "/" + notification: typing.Literal["none", "all", "error", "lost"], optional + whether to notify the user by email upon completion of the run if + the run is in the specified state, by default "none" running : bool, optional whether to set the status as running or created, the latter implying the run will be commenced at a later time. Default is True. @@ -686,6 +690,7 @@ def init( self._sv_obj.tags = tags self._sv_obj.metadata = (metadata or {}) | git_info(os.getcwd()) | environment() self._sv_obj.heartbeat_timeout = timeout + self._sv_obj.notifications = notification if self._status == "running": self._sv_obj.system = get_system() From 01c9b9d91c36bc0ea934f3d53d4e7016ed8fa53c Mon Sep 17 00:00:00 2001 From: Matt Field Date: Wed, 19 Feb 2025 12:04:40 +0000 Subject: [PATCH 48/56] Only change status to running in _start if not already set to that --- simvue/run.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/simvue/run.py b/simvue/run.py index 027e5e71..141ca3cc 100644 --- a/simvue/run.py +++ b/simvue/run.py @@ -465,7 +465,7 @@ def _start(self, reconnect: bool = False) -> bool: logger.debug("Starting run") - if self._sv_obj: + if self._sv_obj and self._sv_obj.status != "running": self._sv_obj.status = self._status self._sv_obj.commit() From 2f9882f8266d77e513c80837f90576f45e09e003 Mon Sep 17 00:00:00 2001 From: Matt Field Date: Wed, 19 Feb 2025 14:38:45 +0000 Subject: [PATCH 49/56] Respond to MR comments --- simvue/config/parameters.py | 32 ++++++++++++++++---------------- 1 file changed, 16 insertions(+), 16 deletions(-) diff --git a/simvue/config/parameters.py b/simvue/config/parameters.py index eb26e523..c6d65c93 100644 --- a/simvue/config/parameters.py +++ b/simvue/config/parameters.py @@ -26,24 +26,24 @@ class ServerSpecifications(pydantic.BaseModel): @pydantic.field_validator("url") @classmethod - def url_to_api_url(cls, v: typing.Any) -> str: - if v: - if 
f"{v}".endswith("/api"): - return f"{v}" - _url = URL(f"{v}") / "api" - return f"{_url}" + def url_to_api_url(cls, v: typing.Any) -> str | None: + if not v: + return + if f"{v}".endswith("/api"): + return f"{v}" + _url = URL(f"{v}") / "api" + return f"{_url}" @pydantic.field_validator("token") - def check_token(cls, v: typing.Any) -> str: - if v: - value = v.get_secret_value() - if not (expiry := get_expiry(value)): - raise AssertionError( - "Failed to parse Simvue token - invalid token form" - ) - if time.time() - expiry > 0: - raise AssertionError("Simvue token has expired") - return v + def check_token(cls, v: typing.Any) -> str | None: + if not v: + return + value = v.get_secret_value() + if not (expiry := get_expiry(value)): + raise AssertionError("Failed to parse Simvue token - invalid token form") + if time.time() - expiry > 0: + raise AssertionError("Simvue token has expired") + return v class OfflineSpecifications(pydantic.BaseModel): From 45be82ff36c076268d99215d345fbe47c92bd2c1 Mon Sep 17 00:00:00 2001 From: Matt Field Date: Wed, 19 Feb 2025 15:01:35 +0000 Subject: [PATCH 50/56] Simplify conversion from alert names to ids --- simvue/run.py | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/simvue/run.py b/simvue/run.py index e9defa0d..c4e92cc2 100644 --- a/simvue/run.py +++ b/simvue/run.py @@ -1639,9 +1639,7 @@ def add_alerts( if names and not ids: try: if alerts := Alert.get(offline=self._user_config.run.mode == "offline"): - for alert in alerts: - if alert[1].name in names: - ids.append(alert[1].id) + ids += [id for id, alert in alerts if alert.name in names] else: self._error("No existing alerts") return False From 12046afd108635906960c7ade40513483ea76229 Mon Sep 17 00:00:00 2001 From: Matt Field Date: Wed, 19 Feb 2025 15:25:44 +0000 Subject: [PATCH 51/56] Update pyproject, citation and changelog for a1 release --- CHANGELOG.md | 18 ++++++++++++++++++ CITATION.cff | 4 ++-- pyproject.toml | 2 +- 3 files changed, 21 insertions(+), 3 
deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 2d6b4967..454d19a5 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,23 @@ # Change log +## [v2.0.0-alpha0](https://github.com/simvue-io/client/releases/tag/v2.0.0a1) - 2025-02-19 +* Fixed `add_alerts` so that it now works with both IDs and names +* Improved alert and folder deduplication methods to rely on 409 responses from server upon creation +* Added `attach_to_run` option to create alerts methods so that alerts can be created without a run attached +* Improved merging of local staging file and _staged dict using `deepmerge` - fixes bugs with tags, alerts and metadata in offline mode +* Added `started`, `created` and `ended` timestamps to runs in offline mode +* Removed all erroneous server calls in offline mode +* Fixed method to find simvue.toml config files, now just looks in cwd and home +* Added run notification option to `run.init` so that users can now get emails upon their runs completing +* Fixed artifact retrieval by run so that `category` parameter works correctly +* Fixed bug where file artifacts wouldn't be saved correctly in offline mode if sender runs in different location to script +* Fixed bug where DEBUG log messages were spamming to the console +* Fixed link to run dashboard printed to the console by removing `/api` +* Fixed bug where offline mode wouldn't work if no run name provided +* Fixed bug where errors would be thrown if a traceback was logged as an event when a run was already terminated +* Fixed hierarchical artifact retrieval to maintain directory structure +* Loosened Numpy requirement to >2.0.0 + ## [v2.0.0-alpha0](https://github.com/simvue-io/client/releases/tag/v2.0.0a0) - 2025-02-10 * Add support for defining Simvue run defaults using `tool.simvue` in a project `pyproject.toml` file.
diff --git a/CITATION.cff b/CITATION.cff index b39358d6..10c11055 100644 --- a/CITATION.cff +++ b/CITATION.cff @@ -43,8 +43,8 @@ keywords: - simulation license: Apache-2.0 commit: 5aaebe682d7ec2f80fefc3eb2a8a26f5bdca1e0c -version: 2.0.0a0 -date-released: '2025-02-11' +version: 2.0.0a1 +date-released: '2025-02-19' references: - title: mlco2/codecarbon version: v2.8.2 diff --git a/pyproject.toml b/pyproject.toml index 3f30a658..d22de055 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [project] name = "simvue" -version = "2.0.0a0" +version = "2.0.0a1" description = "Simulation tracking and monitoring" authors = [ {name = "Simvue Development Team", email = "info@simvue.io"} From f4a9e2185d0c4528859f2fe83bb03bdf6405eab7 Mon Sep 17 00:00:00 2001 From: Matt Field Date: Wed, 19 Feb 2025 15:56:43 +0000 Subject: [PATCH 52/56] Update citation and changelog --- CHANGELOG.md | 2 +- CITATION.cff | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 454d19a5..2a07ae1e 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,6 +1,6 @@ # Change log -## [v2.0.0-alpha0](https://github.com/simvue-io/client/releases/tag/v2.0.0a1) - 2025-02-19 +## [v2.0.0-alpha1](https://github.com/simvue-io/client/releases/tag/v2.0.0a1) - 2025-02-19 * Fixed `add_alerts` so that it now works with both IDs and names * Improved alert and folder deduplication methods to rely on 409 responses from server upon creation * Added `attach_to_run` option to create alerts methods so that alerts can be created without a run attached diff --git a/CITATION.cff b/CITATION.cff index 10c11055..6f95e894 100644 --- a/CITATION.cff +++ b/CITATION.cff @@ -42,7 +42,7 @@ keywords: - alerting - simulation license: Apache-2.0 -commit: 5aaebe682d7ec2f80fefc3eb2a8a26f5bdca1e0c +commit: be7fa45b785c0a8a8c5ecd3fbb0dc798606cc968 version: 2.0.0a1 date-released: '2025-02-19' references: From 7ba6d06697268e9650e9ac02bbc008f4b3b0fe96 Mon Sep 17 00:00:00 2001 From: Matt Field 
Date: Wed, 19 Feb 2025 17:00:36 +0000 Subject: [PATCH 53/56] Fix missing start time --- simvue/run.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/simvue/run.py b/simvue/run.py index c4e92cc2..42bdfd6a 100644 --- a/simvue/run.py +++ b/simvue/run.py @@ -465,8 +465,9 @@ def _start(self, reconnect: bool = False) -> bool: self._start_time = time.time() - if self._sv_obj and self._sv_obj.status != "running": - self._sv_obj.status = self._status + if self._sv_obj: + if self._sv_obj.status != "running": + self._sv_obj.status = self._status self._sv_obj.started = self._start_time self._sv_obj.commit() From 124b2993d91dbff2e475aa972916b11e7bd02fa4 Mon Sep 17 00:00:00 2001 From: Matt Field Date: Wed, 19 Feb 2025 17:06:51 +0000 Subject: [PATCH 54/56] Reduce redundant commit calls --- simvue/run.py | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/simvue/run.py b/simvue/run.py index 42bdfd6a..6906ab74 100644 --- a/simvue/run.py +++ b/simvue/run.py @@ -466,10 +466,15 @@ def _start(self, reconnect: bool = False) -> bool: self._start_time = time.time() if self._sv_obj: + _changed = False if self._sv_obj.status != "running": self._sv_obj.status = self._status - self._sv_obj.started = self._start_time - self._sv_obj.commit() + _changed = True + if self._user_config.run.mode == "offline": + self._sv_obj.started = self._start_time + _changed = True + if _changed: + self._sv_obj.commit() if self._pid == 0: self._pid = os.getpid() From 010abe5d941a5d6be47f3884d18a070bf6c0f458 Mon Sep 17 00:00:00 2001 From: Matt Field Date: Wed, 19 Feb 2025 17:07:25 +0000 Subject: [PATCH 55/56] Update citation --- CITATION.cff | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/CITATION.cff b/CITATION.cff index 6f95e894..d9026ae2 100644 --- a/CITATION.cff +++ b/CITATION.cff @@ -42,7 +42,7 @@ keywords: - alerting - simulation license: Apache-2.0 -commit: be7fa45b785c0a8a8c5ecd3fbb0dc798606cc968 +commit: 
124b2993d91dbff2e475aa972916b11e7bd02fa4 version: 2.0.0a1 date-released: '2025-02-19' references: From 51a433790e834832a5e800d768e116be23f7d7a8 Mon Sep 17 00:00:00 2001 From: Matt Field Date: Thu, 20 Feb 2025 08:34:49 +0000 Subject: [PATCH 56/56] fixed bug where None.closed file is created if closing a non initialized run --- poetry.lock | 10 ++++++---- simvue/run.py | 6 +++++- 2 files changed, 11 insertions(+), 5 deletions(-) diff --git a/poetry.lock b/poetry.lock index 574bac78..ed998f4c 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1320,22 +1320,22 @@ files = [ [[package]] name = "narwhals" -version = "1.26.0" +version = "1.27.1" description = "Extremely lightweight compatibility layer between dataframe libraries" optional = true python-versions = ">=3.8" groups = ["main"] markers = "python_version <= \"3.11\" and extra == \"plot\" or python_version >= \"3.12\" and extra == \"plot\"" files = [ - {file = "narwhals-1.26.0-py3-none-any.whl", hash = "sha256:4af8bbdea9e45638bb9a981568a8dfa880e40eb7dcf740d19fd32aea79223c6f"}, - {file = "narwhals-1.26.0.tar.gz", hash = "sha256:b9d7605bf1d97a9d87783a69748c39150964e2a1ab0e5a6fef3e59e56772639e"}, + {file = "narwhals-1.27.1-py3-none-any.whl", hash = "sha256:71e4a126007886e3dd9d71d0d5921ebd2e8c1f9be9c405fe11850ece2b066c59"}, + {file = "narwhals-1.27.1.tar.gz", hash = "sha256:68505d0cee1e6c00382ac8b65e922f8b694a11cbe482a057fa63139de8d0ea03"}, ] [package.extras] core = ["duckdb", "pandas", "polars", "pyarrow", "pyarrow-stubs"] cudf = ["cudf (>=24.10.0)"] dask = ["dask[dataframe] (>=2024.8)"] -dev = ["covdefaults", "hypothesis", "pre-commit", "pytest", "pytest-cov", "pytest-env", "pytest-randomly", "typing-extensions"] +dev = ["covdefaults", "hypothesis", "mypy (>=1.15.0,<1.16.0)", "pandas-stubs", "pre-commit", "pytest", "pytest-cov", "pytest-env", "pytest-randomly", "typing-extensions"] docs = ["black", "duckdb", "jinja2", "markdown-exec[ansi]", "mkdocs", "mkdocs-autorefs", "mkdocs-material", "mkdocstrings[python]", 
"pandas", "polars (>=1.0.0)", "pyarrow"] duckdb = ["duckdb (>=1.0)"] extra = ["scikit-learn"] @@ -1345,6 +1345,8 @@ pandas = ["pandas (>=0.25.3)"] polars = ["polars (>=0.20.3)"] pyarrow = ["pyarrow (>=11.0.0)"] pyspark = ["pyspark (>=3.5.0)"] +tests = ["covdefaults", "hypothesis", "pytest", "pytest-cov", "pytest-env", "pytest-randomly", "typing-extensions"] +typing = ["mypy (>=1.15.0,<1.16.0)", "pandas-stubs", "typing-extensions"] [[package]] name = "numpy" diff --git a/simvue/run.py b/simvue/run.py index 6906ab74..5bfda8b3 100644 --- a/simvue/run.py +++ b/simvue/run.py @@ -1521,7 +1521,11 @@ def _tidy_run(self) -> None: self._dispatcher.purge() self._dispatcher.join() - if self._user_config.run.mode == "offline" and self._status != "created": + if ( + self._sv_obj + and self._user_config.run.mode == "offline" + and self._status != "created" + ): self._user_config.offline.cache.joinpath( "runs", f"{self._id}.closed" ).touch()