

@pytest.mark.client
+@pytest.mark.object_retrieval
def test_get_events(create_test_run: tuple[sv_run.Run, dict]) -> None:
    client = svc.Client()
    assert client.get_events(run_id=create_test_run[1]["run_id"])


@pytest.mark.client
+@pytest.mark.object_retrieval
@pytest.mark.parametrize("from_run", (True, False), ids=("from_run", "all_runs"))
@pytest.mark.parametrize("names_only", (True, False), ids=("names_only", "all_details"))
@pytest.mark.parametrize(
@@ -96,6 +98,7 @@ def test_get_alerts(


@pytest.mark.client
+@pytest.mark.object_retrieval
def test_get_run_id_from_name(create_test_run: tuple[sv_run.Run, dict]) -> None:
    client = svc.Client()
    assert (
@@ -105,6 +108,7 @@ def test_get_run_id_from_name(create_test_run: tuple[sv_run.Run, dict]) -> None:


@pytest.mark.client
+@pytest.mark.object_retrieval
@pytest.mark.parametrize(
    "aggregate,use_name_labels",
    [(True, False), (False, False), (False, True)],
@@ -138,6 +142,7 @@ def test_get_metric_values(


@pytest.mark.client
+@pytest.mark.object_retrieval
def test_plot_metrics(create_test_run: tuple[sv_run.Run, dict]) -> None:
    try:
        import matplotlib
@@ -153,6 +158,7 @@ def test_plot_metrics(create_test_run: tuple[sv_run.Run, dict]) -> None:


@pytest.mark.client
+@pytest.mark.object_retrieval
@pytest.mark.parametrize(
    "sorting",
    ([("metadata.test_identifier", True)], [("name", True), ("created", True)], None),
@@ -169,6 +175,7 @@ def test_get_artifacts_entries(


@pytest.mark.client
+@pytest.mark.object_retrieval
@pytest.mark.parametrize("file_id", (1, 2, 3), ids=lambda x: f"file_{x}")
def test_get_artifact_as_file(
    create_test_run: tuple[sv_run.Run, dict], file_id: int
@@ -187,6 +194,7 @@ def test_get_artifact_as_file(


@pytest.mark.client
+@pytest.mark.object_retrieval
@pytest.mark.parametrize("category", (None, "code", "input", "output"))
def test_get_artifacts_as_files(
    create_test_run: tuple[sv_run.Run, dict],
@@ -216,6 +224,7 @@ def test_get_artifacts_as_files(


@pytest.mark.client
+@pytest.mark.object_retrieval
@pytest.mark.parametrize(
    "output_format,sorting",
    [
@@ -243,12 +252,14 @@ def test_get_runs(


@pytest.mark.client
+@pytest.mark.object_retrieval
def test_get_run(create_test_run: tuple[sv_run.Run, dict]) -> None:
    client = svc.Client()
    assert client.get_run(run_id=create_test_run[1]["run_id"])


@pytest.mark.client
+@pytest.mark.object_retrieval
@pytest.mark.parametrize(
    "sorting",
    (None, [("metadata.test_identifier", True), ("path", True)], [("modified", False)]),
@@ -265,6 +276,7 @@ def test_get_folders(


@pytest.mark.client
+@pytest.mark.object_retrieval
def test_get_metrics_names(create_test_run: tuple[sv_run.Run, dict]) -> None:
    client = svc.Client()
    attempts: int = 0
@@ -281,6 +293,7 @@ def test_get_metrics_names(create_test_run: tuple[sv_run.Run, dict]) -> None:


@pytest.mark.client
+@pytest.mark.object_retrieval
def test_get_tag(create_plain_run: tuple[sv_run.Run, dict]) -> None:
    _, run_data = create_plain_run
    client = svc.Client()
@@ -297,6 +310,7 @@ def test_get_tag(create_plain_run: tuple[sv_run.Run, dict]) -> None:


@pytest.mark.client
+@pytest.mark.object_removal
def test_run_deletion() -> None:
    run = sv_run.Run()
    run.init(
@@ -314,6 +328,7 @@ def test_run_deletion() -> None:


@pytest.mark.client
+@pytest.mark.object_removal
def test_runs_deletion() -> None:
    _runs = [sv_run.Run() for _ in range(5)]
    for i, run in enumerate(_runs):
@@ -332,6 +347,7 @@ def test_runs_deletion() -> None:


@pytest.mark.client
+@pytest.mark.object_retrieval
def test_get_tags() -> None:
    _uuid = f"{uuid.uuid4()}".split("-")[0]
    tags = ["simvue_unit_testing", "test_get_tags", "testing", _uuid]
@@ -358,6 +374,7 @@ def test_get_tags() -> None:


@pytest.mark.client
+@pytest.mark.object_removal
def test_folder_deletion() -> None:
    run = sv_run.Run()
    _temp_folder_id: str = f"{uuid.uuid4()}".split()[0]
@@ -386,6 +403,7 @@ def test_folder_deletion() -> None:


@pytest.mark.client
+@pytest.mark.object_retrieval
def test_run_folder_metadata_find() -> None:
    _uuid: str = f"{uuid.uuid4()}".split()[0]
    with sv_run.Run() as run:
@@ -404,6 +422,7 @@ def test_run_folder_metadata_find() -> None:


@pytest.mark.client
+@pytest.mark.object_removal
def test_tag_deletion() -> None:
    with sv_run.Run() as run:
        unique_id = f"{uuid.uuid4()}".split("-")[0]
@@ -424,6 +443,7 @@ def test_tag_deletion() -> None:


@pytest.mark.client
+@pytest.mark.object_retrieval
@pytest.mark.parametrize("aggregate", (True, False), ids=("aggregated", "normal"))
@pytest.mark.parametrize("output_format", ("dict", "dataframe"))
@pytest.mark.parametrize("xaxis", ("step", "time", "timestamp"))
@@ -461,6 +481,7 @@ def test_multiple_metric_retrieval(


@pytest.mark.client
+@pytest.mark.object_removal
def test_alert_deletion() -> None:
    _alert = sv_api_obj.UserAlert.new(
        name="test_alert", notification="none", description=None
@@ -473,6 +494,7 @@ def test_alert_deletion() -> None:


@pytest.mark.client
+@pytest.mark.object_removal
def test_abort_run(speedy_heartbeat, create_plain_run: tuple[sv_run.Run, dict]) -> None:
    run, run_data = create_plain_run
    _uuid = f"{uuid.uuid4()}".split("-")[0]