
Commit 2f44e61

🧪 Correct folder deletion test
1 parent a5eaf18 commit 2f44e61

File tree

1 file changed (+56, -95 lines)


tests/functional/test_client.py

Lines changed: 56 additions & 95 deletions
@@ -15,7 +15,6 @@
 import simvue.api.objects as sv_api_obj
 from simvue.api.objects.alert.base import AlertBase
 
-
 @pytest.mark.dependency
 @pytest.mark.client
 def test_get_events(create_test_run: tuple[sv_run.Run, dict]) -> None:
@@ -25,8 +24,12 @@ def test_get_events(create_test_run: tuple[sv_run.Run, dict]) -> None:
 
 @pytest.mark.dependency
 @pytest.mark.client
-@pytest.mark.parametrize("from_run", (True, False), ids=("from_run", "all_runs"))
-@pytest.mark.parametrize("names_only", (True, False), ids=("names_only", "all_details"))
+@pytest.mark.parametrize(
+    "from_run", (True, False), ids=("from_run", "all_runs")
+)
+@pytest.mark.parametrize(
+    "names_only", (True, False), ids=("names_only", "all_details")
+)
 @pytest.mark.parametrize(
     "critical_only", (True, False), ids=("critical_only", "all_states")
 )
@@ -40,44 +43,42 @@ def test_get_alerts(
     run_id = run.id
     unique_id = f"{uuid.uuid4()}".split("-")[0]
     _id_1 = run.create_user_alert(
-        name=f"user_alert_1_{unique_id}",
+        name=f"user_alert_1_{unique_id}",
     )
     run.create_user_alert(
-        name=f"user_alert_2_{unique_id}",
+        name=f"user_alert_2_{unique_id}",
+    )
+    run.create_user_alert(
+        name=f"user_alert_3_{unique_id}",
+        attach_to_run=False
     )
-    run.create_user_alert(name=f"user_alert_3_{unique_id}", attach_to_run=False)
     run.log_alert(identifier=_id_1, state="critical")
     time.sleep(2)
     run.close()
-
+
     client = svc.Client()
 
     if critical_only and not from_run:
         with pytest.raises(RuntimeError) as e:
-            _alerts = client.get_alerts(
-                critical_only=critical_only, names_only=names_only
-            )
-        assert (
-            "critical_only is ambiguous when returning alerts with no run ID specified."
-            in str(e.value)
-        )
+            _alerts = client.get_alerts(critical_only=critical_only, names_only=names_only)
+        assert "critical_only is ambiguous when returning alerts with no run ID specified." in str(e.value)
     else:
         sorting = None if run_id else [("name", True), ("created", True)]
         _alerts = client.get_alerts(
             run_id=run_id if from_run else None,
             critical_only=critical_only,
             names_only=names_only,
-            sort_by_columns=sorting,
+            sort_by_columns=sorting
         )
-
+
         if names_only:
             assert all(isinstance(item, str) for item in _alerts)
         else:
-            assert all(isinstance(item, AlertBase) for item in _alerts)
+            assert all(isinstance(item, AlertBase) for item in _alerts)
             _alerts = [alert.name for alert in _alerts]
-
+
         assert f"user_alert_1_{unique_id}" in _alerts
-
+
         if not from_run:
             assert len(_alerts) > 2
             assert f"user_alert_3_{unique_id}" in _alerts
@@ -89,7 +90,6 @@ def test_get_alerts(
             assert len(_alerts) == 2
             assert f"user_alert_2_{unique_id}" in _alerts
 
-
 @pytest.mark.dependency
 @pytest.mark.client
 def test_get_run_id_from_name(create_test_run: tuple[sv_run.Run, dict]) -> None:
@@ -104,8 +104,12 @@ def test_get_run_id_from_name(create_test_run: tuple[sv_run.Run, dict]) -> None:
 @pytest.mark.client
 @pytest.mark.parametrize(
     "aggregate,use_name_labels",
-    [(True, False), (False, False), (False, True)],
-    ids=("aggregate", "complete_ids", "complete_labels"),
+    [
+        (True, False),
+        (False, False),
+        (False, True)
+    ],
+    ids=("aggregate", "complete_ids", "complete_labels")
 )
 def test_get_metric_values(
     create_test_run: tuple[sv_run.Run, dict], aggregate: bool, use_name_labels: bool
@@ -126,9 +130,9 @@ def test_get_metric_values(
     assert create_test_run[1]["metrics"][0] in _metrics_dict.keys()
     if aggregate:
         _value_types = {i[1] for i in _first_entry}
-        assert all(i in _value_types for i in ("average", "min", "max")), (
-            f"Expected ('average', 'min', 'max') in {_value_types}"
-        )
+        assert all(
+            i in _value_types for i in ("average", "min", "max")
+        ), f"Expected ('average', 'min', 'max') in {_value_types}"
     elif not use_name_labels:
         _runs = {i[1] for i in _first_entry}
         assert create_test_run[1]["run_id"] in _runs
@@ -153,17 +157,12 @@ def test_plot_metrics(create_test_run: tuple[sv_run.Run, dict]) -> None:
 @pytest.mark.dependency
 @pytest.mark.client
 @pytest.mark.parametrize(
-    "sorting",
-    ([("metadata.test_identifier", True)], [("name", True), ("created", True)], None),
-    ids=("sorted-metadata", "sorted-name-created", None),
+    "sorting", ([("metadata.test_identifier", True)], [("name", True), ("created", True)], None),
+    ids=("sorted-metadata", "sorted-name-created", None)
 )
-def test_get_artifacts_entries(
-    create_test_run: tuple[sv_run.Run, dict], sorting: list[tuple[str, bool]] | None
-) -> None:
+def test_get_artifacts_entries(create_test_run: tuple[sv_run.Run, dict], sorting: list[tuple[str, bool]] | None) -> None:
     client = svc.Client()
-    assert dict(
-        client.list_artifacts(create_test_run[1]["run_id"], sort_by_columns=sorting)
-    )
+    assert dict(client.list_artifacts(create_test_run[1]["run_id"], sort_by_columns=sorting))
     assert client.get_artifact(create_test_run[1]["run_id"], name="test_attributes")
 
 
@@ -181,9 +180,7 @@ def test_get_artifact_as_file(
             name=_file_name,
             output_dir=tempd,
         )
-        assert pathlib.Path(tempd).joinpath(_file_name).exists(), (
-            f"Failed to download '{_file_name}'"
-        )
+        assert pathlib.Path(tempd).joinpath(_file_name).exists(), f"Failed to download '{_file_name}'"
 
 
 @pytest.mark.dependency
@@ -199,7 +196,7 @@ def test_get_artifacts_as_files(
             create_test_run[1]["run_id"], category=category, output_dir=tempd
         )
         files = [os.path.basename(i) for i in glob.glob(os.path.join(tempd, "*"))]
-
+
         if not category:
             expected_files = ["file_1", "file_2", "file_3"]
         elif category == "input":
@@ -208,7 +205,7 @@ def test_get_artifacts_as_files(
             expected_files = ["file_2"]
         elif category == "code":
            expected_files = ["file_3"]
-
+
         for file in ["file_1", "file_2", "file_3"]:
             if file in expected_files:
                 assert create_test_run[1][file] in files
@@ -225,18 +222,12 @@ def test_get_artifacts_as_files(
         ("dataframe", [("created", True), ("started", True)]),
         ("objects", [("metadata.test_identifier", True)]),
     ],
-    ids=("dict-unsorted", "dataframe-datesorted", "objects-metasorted"),
+    ids=("dict-unsorted", "dataframe-datesorted", "objects-metasorted")
 )
-def test_get_runs(
-    create_test_run: tuple[sv_run.Run, dict],
-    output_format: str,
-    sorting: list[tuple[str, bool]] | None,
-) -> None:
+def test_get_runs(create_test_run: tuple[sv_run.Run, dict], output_format: str, sorting: list[tuple[str, bool]] | None) -> None:
     client = svc.Client()
 
-    _result = client.get_runs(
-        filters=[], output_format=output_format, count_limit=10, sort_by_columns=sorting
-    )
+    _result = client.get_runs(filters=[], output_format=output_format, count_limit=10, sort_by_columns=sorting)
 
     if output_format == "dataframe":
         assert not _result.empty
@@ -254,13 +245,10 @@ def test_get_run(create_test_run: tuple[sv_run.Run, dict]) -> None:
 @pytest.mark.dependency
 @pytest.mark.client
 @pytest.mark.parametrize(
-    "sorting",
-    (None, [("metadata.test_identifier", True), ("path", True)], [("modified", False)]),
-    ids=("no-sort", "sort-path-metadata", "sort-modified"),
+    "sorting", (None, [("metadata.test_identifier", True), ("path", True)], [("modified", False)]),
+    ids=("no-sort", "sort-path-metadata", "sort-modified")
 )
-def test_get_folders(
-    create_test_run: tuple[sv_run.Run, dict], sorting: list[tuple[str, bool]] | None
-) -> None:
+def test_get_folders(create_test_run: tuple[sv_run.Run, dict], sorting: list[tuple[str, bool]] | None) -> None:
     client = svc.Client()
     assert (folders := client.get_folders(sort_by_columns=sorting))
     _id, _folder = next(folders)
@@ -289,12 +277,7 @@ def test_get_tag(create_plain_run: tuple[sv_run.Run, dict]) -> None:
 @pytest.mark.client
 def test_run_deletion() -> None:
     run = sv_run.Run()
-    run.init(
-        name="test_run_deletion",
-        folder="/simvue_unit_testing",
-        tags=["test_run_deletion"],
-        retention_period="1 min",
-    )
+    run.init(name="test_run_deletion", folder="/simvue_unit_testing", tags=["test_run_deletion"], retention_period="1 min")
     run.log_metrics({"x": 2})
     run.close()
     client = svc.Client()
@@ -308,18 +291,13 @@ def test_run_deletion() -> None:
 def test_runs_deletion() -> None:
     _runs = [sv_run.Run() for _ in range(5)]
     for i, run in enumerate(_runs):
-        run.init(
-            name="test_runs_deletion",
-            folder="/simvue_unit_testing/runs_batch",
-            tags=["test_runs_deletion"],
-            retention_period="1 min",
-        )
+        run.init(name="test_runs_deletion", folder="/simvue_unit_testing/runs_batch", tags=["test_runs_deletion"], retention_period="1 min")
         run.log_metrics({"x": i})
     client = svc.Client()
     assert len(client.delete_runs("/simvue_unit_testing/runs_batch")) > 0
     for run in _runs:
         with pytest.raises(ObjectNotFoundError):
-            client.get_run(run.id)
+            client.get_run(run.id)
 
 
 @pytest.mark.dependency
@@ -339,26 +317,14 @@ def test_get_tags(create_plain_run: tuple[sv_run.Run, dict]) -> None:
 def test_folder_deletion() -> None:
     run = sv_run.Run()
     _temp_folder_id: str = f"{uuid.uuid4()}".split()[0]
-    run.init(
-        name="test_folder_deletion",
-        folder=f"/simvue_unit_testing/{_temp_folder_id}",
-        tags=["test_folder_deletion"],
-        retention_period="1 min",
-    )
+    run.init(name="test_folder_deletion", folder=f"/simvue_unit_testing/{_temp_folder_id}", tags=["test_folder_deletion"], retention_period="1 min")
     run.close()
     client = svc.Client()
     # This test is called last, one run created so expect length 1
-    assert (
-        len(
-            client.delete_folder(
-                f"/simvue_unit_testing/{_temp_folder_id}", remove_runs=True
-            )
-        )
-        == 1
-    )
-    time.sleep(10)
-    with pytest.raises(ObjectNotFoundError):
-        client.get_folder("/simvue_unit_testing/delete_me")
+    assert len(client.delete_folder(f"/simvue_unit_testing/{_temp_folder_id}", remove_runs=True)) == 1
+
+    # If the folder has been deleted then an ObjectNotFoundError should be raised
+    assert not client.get_folder(f"/simvue_unit_testing/{_temp_folder_id}")
     with pytest.raises(ObjectNotFoundError):
         client.get_run(run_id=run.id)
 
@@ -367,24 +333,19 @@ def test_folder_deletion() -> None:
 def test_run_folder_metadata_find(create_plain_run: tuple[sv_run.Run, dict]) -> None:
     run, run_data = create_plain_run
     rand_val = random.randint(0, 1000)
-    run.set_folder_details(metadata={"atest": rand_val})
+    run.set_folder_details(metadata={'atest': rand_val})
     run.close()
     time.sleep(1.0)
     client = svc.Client()
-    data = client.get_folders(filters=[f"metadata.atest == {rand_val}"])
+    data = client.get_folders(filters=[f'metadata.atest == {rand_val}'])
 
     assert run_data["folder"] in [i.path for _, i in data]
 
 
 @pytest.mark.client
 def test_tag_deletion() -> None:
     run = sv_run.Run()
-    run.init(
-        name="test_folder_deletion",
-        folder="/simvue_unit_testing",
-        tags=["test_tag_deletion"],
-        retention_period="1 min",
-    )
+    run.init(name="test_folder_deletion", folder="/simvue_unit_testing", tags=["test_tag_deletion"], retention_period="1 min")
     run.close()
     unique_id = f"{uuid.uuid4()}".split("-")[0]
     run.update_tags([(tag_str := f"delete_me_{unique_id}")])
@@ -438,9 +399,7 @@ def test_multiple_metric_retrieval(
 
 @pytest.mark.client
 def test_alert_deletion() -> None:
-    _alert = sv_api_obj.UserAlert.new(
-        name="test_alert", notification="none", description=None
-    )
+    _alert = sv_api_obj.UserAlert.new(name="test_alert", notification="none", description=None)
     _alert.commit()
     _client = svc.Client()
     _client.delete_alert(alert_id=_alert.id)
@@ -464,3 +423,5 @@ def test_abort_run(speedy_heartbeat, create_plain_run: tuple[sv_run.Run, dict])
     except AssertionError:
         time.sleep(2)
         assert run._status == "terminated"
+
+
0 commit comments
