diff --git a/.github/workflows/test_client_ubuntu.yml b/.github/workflows/test_client_ubuntu.yml index cd87d224..76c53121 100644 --- a/.github/workflows/test_client_ubuntu.yml +++ b/.github/workflows/test_client_ubuntu.yml @@ -91,7 +91,7 @@ jobs: SIMVUE_TOKEN: ${{ secrets.SIMVUE_TOKEN }} run: >- python -m pytest tests/functional/ -x - -m online -c /dev/null -p no:warnings + -m online -m "not eco" -c /dev/null -p no:warnings -n 0 -v -o cache_dir=${GITHUB_WORKSPACE}/.pytest-cache offline_functional_tests: runs-on: ubuntu-latest diff --git a/simvue/factory/proxy/base.py b/simvue/factory/proxy/base.py index 6f35e691..da3991fb 100644 --- a/simvue/factory/proxy/base.py +++ b/simvue/factory/proxy/base.py @@ -14,8 +14,8 @@ def __init__( self._logger = logging.getLogger(f"simvue.{self.__class__.__name__}") self._suppress_errors: bool = suppress_errors self._uuid: str = uniq_id - self._name: str | None = name - self._id: int | None = None + self.name: str | None = name + self.id: int | None = None self._aborted: bool = False def _error(self, message: str) -> None: diff --git a/simvue/factory/proxy/offline.py b/simvue/factory/proxy/offline.py index 650c0d36..4347e1d7 100644 --- a/simvue/factory/proxy/offline.py +++ b/simvue/factory/proxy/offline.py @@ -74,8 +74,8 @@ def create_run(self, data) -> tuple[str, str | None]: self._logger.error("No directory specified") return (None, None) - if not self._name: - self._name = randomname.get_name() + if not self.name: + self.name = randomname.get_name() try: os.makedirs(self._directory, exist_ok=True) @@ -98,7 +98,7 @@ def create_run(self, data) -> tuple[str, str | None]: filename = f"{self._directory}/{status}" create_file(filename) - return self._name, self._id + return self.name, self.id @skip_if_failed("_aborted", "_suppress_errors", None) def update(self, data) -> dict[str, typing.Any] | None: @@ -171,7 +171,7 @@ def set_alert_state( with open(_alert_file) as alert_in: _alert_data = json.load(alert_in) - _alert_data |= {"run": 
self._id, "alert": alert_id, "status": status} + _alert_data |= {"run": self.id, "alert": alert_id, "status": status} self._write_json(_alert_file, _alert_data) diff --git a/simvue/factory/proxy/remote.py b/simvue/factory/proxy/remote.py index c1860dac..ea268606 100644 --- a/simvue/factory/proxy/remote.py +++ b/simvue/factory/proxy/remote.py @@ -39,14 +39,14 @@ def __init__( } super().__init__(name, uniq_id, suppress_errors) - self._id = uniq_id + self.id = uniq_id @skip_if_failed("_aborted", "_suppress_errors", None) def list_tags(self) -> list[str]: logger.debug("Retrieving existing tags") try: response = get( - f"{self._user_config.server.url}/runs/{self._id}", self._headers + f"{self._user_config.server.url}/runs/{self.id}", self._headers ) except Exception as err: self._error(f"Exception retrieving tags: {str(err)}") @@ -120,12 +120,12 @@ def create_run(self, data) -> tuple[str, str | None]: return (None, None) if "name" in response.json(): - self._name = response.json()["name"] + self.name = response.json()["name"] if "id" in response.json(): - self._id = response.json()["id"] + self.id = response.json()["id"] - return self._name, self._id + return self.name, self.id @skip_if_failed("_aborted", "_suppress_errors", None) def update( @@ -134,8 +134,8 @@ def update( """ Update metadata, tags or status """ - if self._id: - data["id"] = self._id + if self.id: + data["id"] = self.id logger.debug('Updating run with data: "%s"', data) @@ -296,7 +296,7 @@ def save_file(self, data: dict[str, typing.Any]) -> dict[str, typing.Any] | None return None if storage_id: - path = f"{self._user_config.server.url}/runs/{self._id}/artifacts" + path = f"{self._user_config.server.url}/runs/{self.id}/artifacts" data["storage"] = storage_id try: @@ -350,7 +350,7 @@ def set_alert_state(self, alert_id, status) -> dict[str, typing.Any] | None: """ Set alert state """ - data = {"run": self._id, "alert": alert_id, "status": status} + data = {"run": self.id, "alert": alert_id, "status": 
"Attempted to get status on non-initialized run - returning cached value"
False) @@ -986,9 +997,8 @@ def reconnect(self, run_id: str) -> bool: """ self._status = "running" - self._id = run_id self._sv_obj = RunObject(identifier=run_id, _read_only=False) - self._name = self._sv_obj.name + self._sv_obj.status = self._status self._sv_obj.system = get_system() self._sv_obj.commit() diff --git a/tests/functional/test_run_class.py b/tests/functional/test_run_class.py index d0ce57a6..7c30567e 100644 --- a/tests/functional/test_run_class.py +++ b/tests/functional/test_run_class.py @@ -56,7 +56,7 @@ def test_check_run_initialised_decorator() -> None: def test_run_with_emissions_online(speedy_heartbeat, mock_co2_signal, create_plain_run) -> None: run_created, _ = create_plain_run run_created._user_config.eco.co2_signal_api_token = "test_token" - run_created.config(enable_emission_metrics=True) + run_created.config(enable_emission_metrics=True, system_metrics_interval=1) time.sleep(5) _run = RunObject(identifier=run_created.id) _metric_names = [item[0] for item in _run.metrics] @@ -158,7 +158,9 @@ def test_log_metrics( retention_period="1 hour", visibility=visibility, ) - run.config(system_metrics_interval=1) + # Will log system metrics on startup, and then not again within timeframe of test + # So should have exactly one measurement of this + run.config(system_metrics_interval=100) return run.init( @@ -171,8 +173,9 @@ def test_log_metrics( visibility=visibility, retention_period="1 hour", ) - run.config(system_metrics_interval=1) - + # Will log system metrics on startup, and then not again within timeframe of test + # So should have exactly one measurement of this + run.config(system_metrics_interval=100) # Speed up the read rate for this test run._dispatcher._max_buffer_size = 10 run._dispatcher._max_read_rate *= 10 @@ -186,14 +189,14 @@ def test_log_metrics( run.close() client = sv_cl.Client() _data = client.get_metric_values( - run_ids=[run._id], + run_ids=[run.id], metric_names=list(METRICS.keys()), xaxis="step", aggregate=False, ) with 
contextlib.suppress(RuntimeError): - client.delete_run(run._id) + client.delete_run(run.id) assert _data @@ -221,7 +224,7 @@ def test_log_metrics( def test_log_metrics_offline(create_plain_run_offline: tuple[sv_run.Run, dict]) -> None: METRICS = {"a": 10, "b": 1.2, "c": 2} run, _ = create_plain_run_offline - run_name = run._name + run_name = run.name run.log_metrics(METRICS) time.sleep(1) sv_send.sender(os.environ["SIMVUE_OFFLINE_DIRECTORY"], 2, 10) @@ -277,7 +280,7 @@ def test_visibility_online( retention_period="1 hour", ) time.sleep(1) - _id = run._id + _id = run.id run.close() _retrieved_run = RunObject(identifier=_id) @@ -330,7 +333,7 @@ def test_visibility_offline( retention_period="1 hour", ) time.sleep(1) - _id = run._id + _id = run.id _id_mapping = sv_send.sender(os.environ["SIMVUE_OFFLINE_DIRECTORY"], 2, 10) run.close() _retrieved_run = RunObject(identifier=_id_mapping.get(_id)) @@ -361,7 +364,7 @@ def test_log_events_online(create_test_run: tuple[sv_run.Run, dict]) -> None: def test_log_events_offline(create_plain_run_offline: tuple[sv_run.Run, dict]) -> None: EVENT_MSG = "Hello offline world!" 
run, _ = create_plain_run_offline - run_name = run._name + run_name = run.name run.log_event(EVENT_MSG) time.sleep(1) sv_send.sender(os.environ["SIMVUE_OFFLINE_DIRECTORY"], 2, 10) @@ -431,7 +434,7 @@ def test_update_metadata_offline( ) -> None: METADATA = {"a": 1, "b": 1.2, "c": "word", "d": "new"} run, _ = create_plain_run_offline - run_name = run._name + run_name = run.name # Add an initial set of metadata run.update_metadata({"a": 10, "b": 1.2, "c": "word"}) # Try updating a second time, check original dict isnt overwritten @@ -476,7 +479,7 @@ def thread_func(index: int) -> tuple[int, list[dict[str, typing.Any]], str]: metric = {f"var_{index + 1}": random.random()} metrics.append(metric) run.log_metrics(metric) - return index, metrics, run._id + return index, metrics, run.id with concurrent.futures.ThreadPoolExecutor(max_workers=N_RUNS) as executor: futures = [executor.submit(thread_func, i) for i in range(N_RUNS)] @@ -532,7 +535,7 @@ def thread_func(index: int) -> tuple[int, list[dict[str, typing.Any]], str]: client = sv_cl.Client() - for i, run_id in enumerate((run_1._id, run_2._id)): + for i, run_id in enumerate((run_1.id, run_2.id)): assert metrics assert client.get_metric_values( run_ids=[run_id], @@ -543,8 +546,8 @@ def thread_func(index: int) -> tuple[int, list[dict[str, typing.Any]], str]: ) with contextlib.suppress(RuntimeError): - client.delete_run(run_1._id) - client.delete_run(run_2._id) + client.delete_run(run_1.id) + client.delete_run(run_2.id) @pytest.mark.run @@ -567,7 +570,7 @@ def test_runs_multiple_series(request: pytest.FixtureRequest) -> None: folder="/simvue_unit_testing", retention_period="1 hour", ) - run_ids.append(run._id) + run_ids.append(run.id) for _ in range(10): time.sleep(1) metric = {f"var_{index}": random.random()} @@ -752,7 +755,7 @@ def test_save_file_offline( capfd, ) -> None: simvue_run, _ = create_plain_run_offline - run_name = simvue_run._name + run_name = simvue_run.name file_type: str = "text/plain" with 
tempfile.TemporaryDirectory() as tempd: with open( @@ -768,13 +771,6 @@ def test_save_file_offline( preserve_path=preserve_path, name=name, ) - - simvue_run.save_file( - out_name, - category=category, - preserve_path=preserve_path, - name=name, - ) sv_send.sender(os.environ["SIMVUE_OFFLINE_DIRECTORY"], 2, 10) simvue_run.close() time.sleep(1.0) @@ -812,13 +808,13 @@ def test_update_tags_running( time.sleep(1) client = sv_cl.Client() - run_data = client.get_run(simvue_run._id) + run_data = client.get_run(simvue_run.id) assert sorted(run_data.tags) == sorted(tags) simvue_run.update_tags(["additional"]) time.sleep(1) - run_data = client.get_run(simvue_run._id) + run_data = client.get_run(simvue_run.id) assert sorted(run_data.tags) == sorted(tags + ["additional"]) @@ -838,13 +834,13 @@ def test_update_tags_created( time.sleep(1) client = sv_cl.Client() - run_data = client.get_run(simvue_run._id) + run_data = client.get_run(simvue_run.id) assert sorted(run_data.tags) == sorted(tags) simvue_run.update_tags(["additional"]) time.sleep(1) - run_data = client.get_run(simvue_run._id) + run_data = client.get_run(simvue_run.id) assert sorted(run_data.tags) == sorted(tags + ["additional"]) @@ -854,7 +850,7 @@ def test_update_tags_offline( create_plain_run_offline: tuple[sv_run.Run, dict], ) -> None: simvue_run, _ = create_plain_run_offline - run_name = simvue_run._name + run_name = simvue_run.name simvue_run.set_tags( [ @@ -872,7 +868,7 @@ def test_update_tags_offline( run_data = client.get_run(client.get_run_id_from_name(run_name)) time.sleep(1) - run_data = client.get_run(simvue_run._id) + run_data = client.get_run(simvue_run.id) assert sorted(run_data.tags) == sorted(["simvue_client_unit_tests", "additional"]) @@ -921,7 +917,7 @@ def test_add_alerts() -> None: _expected_alerts.append(_id) time.sleep(1) # Retrieve run, check if alert has been added - _online_run = RunObject(identifier=run._id) + _online_run = RunObject(identifier=run.id) assert _id in _online_run.alerts # 
Create another alert and attach to run @@ -985,7 +981,7 @@ def test_add_alerts() -> None: run.close() client = sv_cl.Client() - client.delete_run(run._id) + client.delete_run(run.id) for _id in _expected_alerts: client.delete_alert(_id) @@ -1002,7 +998,7 @@ def test_log_alert() -> None: tags=["test_add_alerts"], visibility="tenant", ) - _run_id = run._id + _run_id = run.id # Create a user alert _id = run.create_user_alert( name=f"user_alert_{_uuid}", @@ -1068,7 +1064,7 @@ def abort_callback(abort_run=trigger) -> None: assert len(child_processes := process.children(recursive=True)) == 3 time.sleep(2) client = sv_cl.Client() - client.abort_run(run._id, reason="testing abort") + client.abort_run(run.id, reason="testing abort") time.sleep(4) assert run._system_metrics_interval == 1 for child in child_processes: @@ -1145,7 +1141,7 @@ def test_run_created_with_no_timeout() -> None: timeout=None, ) client = simvue.Client() - assert client.get_run(run._id) + assert client.get_run(run.id) @pytest.mark.parametrize("mode", ("online", "offline"), ids=("online", "offline")) diff --git a/tests/functional/test_scenarios.py b/tests/functional/test_scenarios.py index 1a4042c4..de6a1ea7 100644 --- a/tests/functional/test_scenarios.py +++ b/tests/functional/test_scenarios.py @@ -103,7 +103,7 @@ def delete_run(): def upload(name: str, values_per_run: int, shared_dict) -> None: run = simvue.Run() run.init(name=name, tags=["simvue_client_tests"]) - shared_dict["ident"] = run._id + shared_dict["ident"] = run.id for i in range(values_per_run): run.log_metrics({"increment": i}) run.close()