Fix consistency of folders tags for ci test runs #348

Merged: 3 commits, May 24, 2024

2 changes: 1 addition & 1 deletion tests/functional/test_run_offline_folder.py
@@ -24,7 +24,7 @@ def test_basic_run_folder(self):
             pass
 
         name = "test-%s" % str(uuid.uuid4())
-        folder = "/test-%s" % str(uuid.uuid4())
+        folder = "/simvue_unit_testing"
         metadata = {str(uuid.uuid4()): 100 * random.random()}
         run = Run("offline")
         run.init(name, folder=folder)
27 changes: 16 additions & 11 deletions tests/refactor/conftest.py
@@ -42,48 +42,53 @@ def log_messages(caplog):
 
 
 @pytest.fixture
-def create_test_run() -> typing.Generator[typing.Tuple[sv_run.Run, dict], None, None]:
+def create_test_run(request) -> typing.Generator[typing.Tuple[sv_run.Run, dict], None, None]:
     with sv_run.Run() as run:
-        yield run, setup_test_run(run, True)
+        yield run, setup_test_run(run, True, request)
 
 
 @pytest.fixture
-def create_test_run_offline(mocker: pytest_mock.MockerFixture) -> typing.Generator[typing.Tuple[sv_run.Run, dict], None, None]:
+def create_test_run_offline(mocker: pytest_mock.MockerFixture, request) -> typing.Generator[typing.Tuple[sv_run.Run, dict], None, None]:
     with tempfile.TemporaryDirectory() as temp_d:
         mocker.patch.object(simvue.utilities, "get_offline_directory", lambda *_: temp_d)
         with sv_run.Run("offline") as run:
-            yield run, setup_test_run(run, True)
+            yield run, setup_test_run(run, True, request)
 
 
 @pytest.fixture
-def create_plain_run() -> typing.Generator[typing.Tuple[sv_run.Run, dict], None, None]:
+def create_plain_run(request) -> typing.Generator[typing.Tuple[sv_run.Run, dict], None, None]:
     with sv_run.Run() as run:
-        yield run, setup_test_run(run, False)
+        yield run, setup_test_run(run, False, request)
 
 
 @pytest.fixture
-def create_plain_run_offline(mocker: pytest_mock.MockerFixture) -> typing.Generator[typing.Tuple[sv_run.Run, dict], None, None]:
+def create_plain_run_offline(mocker: pytest_mock.MockerFixture, request) -> typing.Generator[typing.Tuple[sv_run.Run, dict], None, None]:
     with tempfile.TemporaryDirectory() as temp_d:
         mocker.patch.object(simvue.utilities, "get_offline_directory", lambda *_: temp_d)
         with sv_run.Run("offline") as run:
 
-            yield run, setup_test_run(run, False)
+            yield run, setup_test_run(run, False, request)
 
 
-def setup_test_run(run: sv_run.Run, create_objects: bool):
+def setup_test_run(run: sv_run.Run, create_objects: bool, request: pytest.FixtureRequest):
     fix_use_id: str = str(uuid.uuid4()).split('-', 1)[0]
     TEST_DATA = {
         "event_contains": "sent event",
         "metadata": {
             "test_engine": "pytest",
             "test_identifier": fix_use_id
         },
-        "folder": f"/simvue_unit_testing/{fix_use_id}"
+        "folder": f"/simvue_unit_testing/{fix_use_id}",
+        "tags": ["simvue_client_unit_tests", request.node.name]
     }
 
+    if os.environ.get("CI"):
+        TEST_DATA["tags"].append("ci")
+
     run.config(suppress_errors=False)
     run.init(
         name=f"test_run_{TEST_DATA['metadata']['test_identifier']}",
-        tags=["simvue_client_unit_tests"],
+        tags=TEST_DATA["tags"],
         folder=TEST_DATA["folder"],
         visibility="tenant",
         retention_period="1 hour"
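
The per-test tagging above leans on pytest's built-in request fixture: request.node.name is the name of the test currently consuming the fixture, including any parametrize suffix, so each run is tagged with exactly the test that created it, plus a "ci" tag when the CI environment variable is set (GitHub Actions and most other providers export CI=true). A minimal standalone sketch of that behavior; the fixture and test names here are illustrative, not part of this PR:

import os
import pytest

@pytest.fixture
def run_tags(request: pytest.FixtureRequest) -> list[str]:
    # request.node.name resolves per consuming test, e.g. "test_example[one]"
    tags = ["simvue_client_unit_tests", request.node.name]
    if os.environ.get("CI"):  # set by GitHub Actions and most CI runners
        tags.append("ci")
    return tags

@pytest.mark.parametrize("case", ("one", "two"))
def test_example(run_tags: list[str], case: str) -> None:
    assert run_tags[1] == f"test_example[{case}]"
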
2 changes: 0 additions & 2 deletions tests/refactor/test_client.py
@@ -175,7 +175,6 @@ def test_run_deletion(create_test_run: tuple[sv_run.Run, dict]) -> None:
 @pytest.mark.client(depends=PRE_DELETION_TESTS)
 def test_runs_deletion(create_test_run: tuple[sv_run.Run, dict]) -> None:
     run, run_data = create_test_run
-    run.update_tags(["simvue_client_unit_tests", "test_runs_deletion"])
     run.close()
     client = svc.Client()
     assert len(client.delete_runs(run_data["folder"])) > 0
@@ -185,7 +184,6 @@ def test_runs_deletion(create_test_run: tuple[sv_run.Run, dict]) -> None:
 @pytest.mark.client(depends=PRE_DELETION_TESTS + ["test_runs_deletion"])
 def test_folder_deletion(create_test_run: tuple[sv_run.Run, dict]) -> None:
     run, run_data = create_test_run
-    run.update_tags(["simvue_client_unit_tests", "test_folder_deletion"])
     run.close()
     client = svc.Client()
     # This test is called last, one run created so expect length 1
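
With tags now applied per test at run.init time by the create_test_run fixture, the manual run.update_tags(...) calls above became redundant and are dropped. Cleanup still keys off the folder path; a rough sketch of that pattern, assuming (as the assertions above imply) that Client.delete_runs accepts a folder path and returns the deleted runs:

import simvue.client as svc

# Hypothetical folder path; in the real tests this comes from the
# conftest fixture as run_data["folder"], i.e. "/simvue_unit_testing/{fix_use_id}".
folder = "/simvue_unit_testing/0a1b2c3d"

client = svc.Client()
deleted = client.delete_runs(folder)
print(f"Removed {len(deleted)} run(s) from {folder}")
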
5 changes: 3 additions & 2 deletions tests/refactor/test_executor.py
@@ -8,13 +8,14 @@
 @pytest.mark.executor
 @pytest.mark.parametrize("successful", (True, False), ids=("successful", "failing"))
 def test_executor_add_process(
-    successful: bool
+    successful: bool,
+    request: pytest.FixtureRequest
 ) -> None:
     run = simvue.Run()
     completion_trigger = multiprocessing.Event()
     run.init(
         f"test_executor_{'success' if successful else 'fail'}",
-        tags=["simvue_client_unit_tests"],
+        tags=["simvue_client_unit_tests", request.node.name],
         folder="/simvue_unit_test_folder"
     )
 
32 changes: 13 additions & 19 deletions tests/refactor/test_run_class.py
@@ -32,6 +32,7 @@ def test_log_metrics(
     overload_buffer: bool,
     setup_logging: "CountingLogHandler",
     mocker,
+    request: pytest.FixtureRequest,
     visibility: typing.Union[typing.Literal["public", "tenant"], list[str], None]
 ) -> None:
     METRICS = {"a": 10, "b": 1.2}
@@ -47,7 +48,7 @@
     with pytest.raises(RuntimeError):
         run.init(
             name=f"test_run_{str(uuid.uuid4()).split('-', 1)[0]}",
-            tags=["simvue_client_unit_tests"],
+            tags=["simvue_client_unit_tests", request.node.name],
             folder="/simvue_unit_testing",
             retention_period="1 hour",
             visibility=visibility,
@@ -57,15 +58,13 @@
 
     run.init(
         name=f"test_run_{str(uuid.uuid4()).split('-', 1)[0]}",
-        tags=["simvue_client_unit_tests"],
+        tags=["simvue_client_unit_tests", request.node.name],
         folder="/simvue_unit_testing",
         visibility=visibility,
         resources_metrics_interval=1,
         retention_period="1 hour",
     )
 
-    run.update_tags(["simvue_client_unit_tests", "test_log_metrics"])
-
     # Speed up the read rate for this test
     run._dispatcher._max_buffer_size = 10
     run._dispatcher._max_read_rate *= 10
@@ -110,31 +109,27 @@ def test_log_metrics(
 def test_log_metrics_offline(create_test_run_offline: tuple[sv_run.Run, dict]) -> None:
     METRICS = {"a": 10, "b": 1.2, "c": 2}
     run, _ = create_test_run_offline
-    run.update_tags(["simvue_client_unit_tests", "test_log_metrics"])
     run.log_metrics(METRICS)
 
 
 @pytest.mark.run
 def test_log_events(create_test_run: tuple[sv_run.Run, dict]) -> None:
     EVENT_MSG = "Hello world!"
     run, _ = create_test_run
-    run.update_tags(["simvue_client_unit_tests", "test_log_events"])
     run.log_event(EVENT_MSG)
 
 
 @pytest.mark.run
 def test_log_events_offline(create_test_run_offline: tuple[sv_run.Run, dict]) -> None:
     EVENT_MSG = "Hello world!"
     run, _ = create_test_run_offline
-    run.update_tags(["simvue_client_unit_tests", "test_log_events"])
     run.log_event(EVENT_MSG)
 
 
 @pytest.mark.run
 def test_update_metadata(create_test_run: tuple[sv_run.Run, dict]) -> None:
     METADATA = {"a": 10, "b": 1.2, "c": "word"}
     run, _ = create_test_run
-    run.update_tags(["simvue_client_unit_tests", "test_update_metadata"])
     run.update_metadata(METADATA)
 

@@ -144,13 +139,12 @@ def test_update_metadata_offline(
 ) -> None:
     METADATA = {"a": 10, "b": 1.2, "c": "word"}
     run, _ = create_test_run_offline
-    run.update_tags(["simvue_client_unit_tests", "test_update_metadata"])
     run.update_metadata(METADATA)
 
 
 @pytest.mark.run
 @pytest.mark.parametrize("multi_threaded", (True, False), ids=("multi", "single"))
-def test_runs_multiple_parallel(multi_threaded: bool) -> None:
+def test_runs_multiple_parallel(multi_threaded: bool, request: pytest.FixtureRequest) -> None:
     N_RUNS: int = 2
     if multi_threaded:
 
@@ -159,7 +153,7 @@ def thread_func(index: int) -> tuple[int, list[dict[str, typing.Any]], str]:
             run.config(suppress_errors=False)
             run.init(
                 name=f"test_runs_multiple_{index + 1}",
-                tags=["simvue_client_unit_tests", "test_multi_run_threaded"],
+                tags=["simvue_client_unit_tests", request.node.name],
                 folder="/simvue_unit_testing",
                 retention_period="1 hour",
             )
@@ -196,7 +190,7 @@ def thread_func(index: int) -> tuple[int, list[dict[str, typing.Any]], str]:
         run_1.config(suppress_errors=False)
         run_1.init(
             name="test_runs_multiple_unthreaded_1",
-            tags=["simvue_client_unit_tests", "test_multi_run_unthreaded"],
+            tags=["simvue_client_unit_tests", request.node.name],
             folder="/simvue_unit_testing",
             retention_period="1 hour",
         )
@@ -238,7 +232,7 @@ def thread_func(index: int) -> tuple[int, list[dict[str, typing.Any]], str]:
 
 
 @pytest.mark.run
-def test_runs_multiple_series() -> None:
+def test_runs_multiple_series(request: pytest.FixtureRequest) -> None:
     N_RUNS: int = 2
 
     metrics = []
@@ -250,7 +244,7 @@
         run.config(suppress_errors=False)
         run.init(
             name=f"test_runs_multiple_series_{index}",
-            tags=["simvue_client_unit_tests", "test_multi_run_series"],
+            tags=["simvue_client_unit_tests", request.node.name],
             folder="/simvue_unit_testing",
             retention_period="1 hour",
         )
@@ -284,7 +278,7 @@ def test_runs_multiple_series() -> None:
 @pytest.mark.run
 @pytest.mark.parametrize("post_init", (True, False), ids=("pre-init", "post-init"))
 def test_suppressed_errors(
-    setup_logging: "CountingLogHandler", post_init: bool
+    setup_logging: "CountingLogHandler", post_init: bool, request: pytest.FixtureRequest
 ) -> None:
     setup_logging.captures = ["Skipping call to"]
 
@@ -300,7 +294,7 @@
         run.init(
             name="test_suppressed_errors",
             folder="/simvue_unit_testing",
-            tags=["simvue_client_unit_tests"],
+            tags=["simvue_client_unit_tests", request.node.name],
             retention_period="1 hour"
         )
 
@@ -323,11 +317,11 @@ def test_bad_run_arguments() -> None:
         run.init("sdas", [34])
 
 
-def test_set_folder_details() -> None:
+def test_set_folder_details(request: pytest.FixtureRequest) -> None:
     with sv_run.Run() as run:
-        folder_name: str = "/simvue_unit_test_folder"
+        folder_name: str = "/simvue_unit_tests"
         description: str = "test description"
-        tags: list[str] = ["simvue_client_unit_tests", "test_set_folder_details"]
+        tags: list[str] = ["simvue_client_unit_tests", request.node.name]
         run.init(folder=folder_name)
         run.set_folder_details(path=folder_name, tags=tags, description=description)
2 changes: 1 addition & 1 deletion tests/unit/test_run_init_folder.py
@@ -15,7 +15,7 @@ def test_run_init_folder():
     with pytest.raises(RuntimeError) as exc_info:
         run.init(
             metadata={"dataset.x1_lower": x1_lower, "dataset.x1_upper": x1_upper},
-            tags=["tag_1", "tag_2"],
+            tags=["tag_1", "tag_2", "test_run_init_folder"],
             folder="test_folder",
             description="A test to validate folder input passed into run.init",
             retention_period="1 hour",
4 changes: 3 additions & 1 deletion tests/unit/test_run_init_metadata.py
@@ -15,7 +15,9 @@ def test_run_init_metadata():
     with pytest.raises(RuntimeError) as exc_info:
         run.init(metadata={'dataset.x1_lower': x1_lower, 'dataset.x1_upper': x1_upper},
                  description="A test to validate inputs passed into metadata dictionary",
-                 retention_period="1 hour"
+                 retention_period="1 hour",
+                 folder="/simvue_unit_testing",
+                 tags=["simvue_client_unit_tests", "test_run_init_metadata"]
                  )
 
     assert "Input should be a valid integer" in str(exc_info.value)
3 changes: 2 additions & 1 deletion tests/unit/test_run_init_tags.py
@@ -15,7 +15,8 @@ def test_run_init_tags():
     with pytest.raises(RuntimeError) as exc_info:
         run.init(metadata={'dataset.x1_lower': x1_lower, 'dataset.x1_upper': x1_upper}, tags=1,
                  description="A test to validate tag inputs passed into run.init",
-                 retention_period="1 hour"
+                 retention_period="1 hour",
+                 folder="/simvue_unit_testing"
                  )
 
     assert "Input should be a valid list" in str(exc_info.value)