diff --git a/common/library/module_utils/local_repo/process_parallel.py b/common/library/module_utils/local_repo/process_parallel.py index 8b6b553360..43d9c1a32e 100644 --- a/common/library/module_utils/local_repo/process_parallel.py +++ b/common/library/module_utils/local_repo/process_parallel.py @@ -276,6 +276,10 @@ def execute_parallel( arc, standard_logger, local_repo_config_path, + user_reg_cred_input, + user_reg_key_path, + omnia_credentials_yaml_path, + omnia_credentials_vault_path, timeout ): """ @@ -307,10 +311,10 @@ def execute_parallel( config = load_yaml_file(local_repo_config_path) user_registries = config.get("user_registry", []) if user_registries: - if is_encrypted(USER_REG_CRED_INPUT): - process_file(USER_REG_CRED_INPUT, USER_REG_KEY_PATH, 'decrypt') + if is_encrypted(user_reg_cred_input): + process_file(user_reg_cred_input, user_reg_key_path, 'decrypt') - file2_data = load_yaml_file(USER_REG_CRED_INPUT) + file2_data = load_yaml_file(user_reg_cred_input) cred_lookup = { entry['name']: entry for entry in file2_data.get('user_registry_credential', []) @@ -325,8 +329,8 @@ def execute_parallel( try: - docker_username, docker_password = load_docker_credentials(OMNIA_CREDENTIALS_YAML_PATH, - OMNIA_CREDENTIALS_VAULT_PATH) + docker_username, docker_password = load_docker_credentials(omnia_credentials_yaml_path, + omnia_credentials_vault_path) except RuntimeError as e: raise # Create a pool of worker processes to handle the tasks diff --git a/common/library/modules/cert_vault_handler.py b/common/library/modules/cert_vault_handler.py index 057a90f469..27afc97da4 100644 --- a/common/library/modules/cert_vault_handler.py +++ b/common/library/modules/cert_vault_handler.py @@ -81,8 +81,8 @@ def main(): log.info(f"Start execution time cert_vault_handler: {start_time}") - - local_repo_config = load_yaml_file(LOCAL_REPO_CONFIG_PATH_DEFAULT) + local_repo_path = os.path.join(vault_key_path, "local_repo_config.yml") + local_repo_config = 
load_yaml_file(local_repo_path) user_repos = local_repo_config.get(USER_REPO_URL, []) if not user_repos: log.info("No user repo found, proceeding without encryption") diff --git a/common/library/modules/check_user_registry.py b/common/library/modules/check_user_registry.py index b5199f79f9..8f59c93f68 100644 --- a/common/library/modules/check_user_registry.py +++ b/common/library/modules/check_user_registry.py @@ -16,7 +16,12 @@ #!/usr/bin/python from ansible.module_utils.basic import AnsibleModule -from ansible.module_utils.local_repo.common_functions import load_yaml_file, get_repo_list, is_encrypted, process_file +from ansible.module_utils.local_repo.common_functions import ( + load_yaml_file, + get_repo_list, + is_encrypted, + process_file +) from ansible.module_utils.local_repo.registry_utils import ( validate_user_registry, check_reachability, @@ -40,12 +45,16 @@ def main(): argument_spec=dict( timeout=dict(type='int', default=5), config_file=dict(type='str', required=True), + user_reg_cred_input=dict(type='str', required=False, default=USER_REG_CRED_INPUT), + user_reg_key_path=dict(type='str', required=False, default=USER_REG_KEY_PATH) ), supports_check_mode=True ) config_path = module.params['config_file'] timeout = module.params['timeout'] + user_reg_cred_input = module.params["user_reg_cred_input"] + user_reg_key_path = module.params["user_reg_key_path"] try: config_data = load_yaml_file(config_path) @@ -56,10 +65,10 @@ def main(): if user_registry: # Load credentials - if is_encrypted(USER_REG_CRED_INPUT): - process_file(USER_REG_CRED_INPUT, USER_REG_KEY_PATH, 'decrypt') + if is_encrypted(user_reg_cred_input): + process_file(user_reg_cred_input, user_reg_key_path, 'decrypt') - file2_data = load_yaml_file(USER_REG_CRED_INPUT) + file2_data = load_yaml_file(user_reg_cred_input) cred_lookup = { entry['name']: entry for entry in file2_data.get('user_registry_credential', []) diff --git a/common/library/modules/parallel_tasks.py 
b/common/library/modules/parallel_tasks.py index bdf4416761..5a8546e9a6 100644 --- a/common/library/modules/parallel_tasks.py +++ b/common/library/modules/parallel_tasks.py @@ -54,7 +54,9 @@ STATUS_CSV_HEADER, LOCAL_REPO_CONFIG_PATH_DEFAULT, USER_REG_CRED_INPUT, - USER_REG_KEY_PATH + USER_REG_KEY_PATH, + OMNIA_CREDENTIALS_YAML_PATH, + OMNIA_CREDENTIALS_VAULT_PATH ) def update_status_csv(csv_dir, software, overall_status): @@ -259,6 +261,10 @@ "overall_status_dict": {"type": "dict", "required": False, "default": {}}, "local_repo_config_path": {"type": "str", "required": False, "default": LOCAL_REPO_CONFIG_PATH_DEFAULT}, - "arch": {"type": "str", "required": False} + "arch": {"type": "str", "required": False}, + "user_reg_cred_input": {"type": "str", "required": False, "default": USER_REG_CRED_INPUT}, + "user_reg_key_path": {"type": "str", "required": False, "default": USER_REG_KEY_PATH}, + "omnia_credentials_yaml_path": {"type": "str", "required": False, "default": OMNIA_CREDENTIALS_YAML_PATH}, + "omnia_credentials_vault_path": {"type": "str", "required": False, "default": OMNIA_CREDENTIALS_VAULT_PATH} } module = AnsibleModule(argument_spec=module_args, supports_check_mode=True) tasks = module.params["tasks"] @@ -275,6 +281,11 @@ overall_status_dict = module.params['overall_status_dict'] local_repo_config_path = module.params["local_repo_config_path"] arc= module.params["arch"] + user_reg_cred_input = module.params["user_reg_cred_input"] + user_reg_key_path = module.params["user_reg_key_path"] + omnia_credentials_yaml_path = module.params["omnia_credentials_yaml_path"] + omnia_credentials_vault_path = module.params["omnia_credentials_vault_path"] + # Initialize standard logger. 
slogger = setup_standard_logger(slog_file) result = {"changed": False, "task_results": []} @@ -304,18 +315,19 @@ slogger.info(f"Cluster OS: {cluster_os_type}") slogger.info(f"Version Variables: {version_variables}") gen_result = {} - if not os.path.isfile(USER_REG_KEY_PATH): - gen_result = generate_vault_key(USER_REG_KEY_PATH) + if not os.path.isfile(user_reg_key_path): + gen_result = generate_vault_key(user_reg_key_path) if gen_result is None: - module.fail_json(msg=f"Unable to generate local_repo key at path: {USER_REG_KEY_PATH}") + module.fail_json(msg=f"Unable to generate local_repo key at path: {user_reg_key_path}") overall_status, task_results = execute_parallel( tasks, determine_function, nthreads, repo_store_path, csv_file_path, - log_dir, user_data, version_variables, arc, slogger, local_repo_config_path, timeout + log_dir, user_data, version_variables, arc, slogger, local_repo_config_path, user_reg_cred_input, user_reg_key_path, + omnia_credentials_yaml_path, omnia_credentials_vault_path, timeout ) - if not is_encrypted(USER_REG_CRED_INPUT): - process_file(USER_REG_CRED_INPUT,USER_REG_KEY_PATH,'encrypt') + if not is_encrypted(user_reg_cred_input): + process_file(user_reg_cred_input,user_reg_key_path,'encrypt') end_time = datetime.now() formatted_end_time = end_time.strftime("%I:%M:%S %p") diff --git a/local_repo/roles/parse_and_download/tasks/execute_parallel_tasks.yml b/local_repo/roles/parse_and_download/tasks/execute_parallel_tasks.yml index 47bbb54ded..5d1dfc8bdc 100644 --- a/local_repo/roles/parse_and_download/tasks/execute_parallel_tasks.yml +++ b/local_repo/roles/parse_and_download/tasks/execute_parallel_tasks.yml @@ -27,6 +27,10 @@ user_json_file: "{{ user_json_file }}" local_repo_config_path: "{{ local_repo_config_path }}" arch: "{{ item.arch }}" + user_reg_cred_input: "{{ user_reg_cred_input }}" + user_reg_key_path: "{{ user_reg_key_path }}" + omnia_credentials_yaml_path: "{{ omnia_credentials_yaml_path }}" + omnia_credentials_vault_path: 
"{{ omnia_credentials_vault_path }}" nthreads: "{{ (local_repo_py_module_vars[item.key].nthreads | default(local_repo_py_module_vars.default_vars.nthreads)) }}" timeout: "{{ (local_repo_py_module_vars[item.key].timeout | default(local_repo_py_module_vars.default_vars.timeout)) }}" register: task_results diff --git a/local_repo/roles/parse_and_download/vars/main.yml b/local_repo/roles/parse_and_download/vars/main.yml index 0e6eb88308..c2661e1a84 100644 --- a/local_repo/roles/parse_and_download/vars/main.yml +++ b/local_repo/roles/parse_and_download/vars/main.yml @@ -23,6 +23,10 @@ local_repo_config_path: "{{ project_input_path }}/local_repo_config.yml" sw_config_json_path: "{{ project_input_path }}/software_config.json" roles_config_path: "{{ project_input_path }}/roles_config.yml" user_json_file: "{{ project_input_path }}/software_config.json" +user_reg_cred_input: "{{ project_input_path }}/user_registry_credential.yml" +user_reg_key_path: "{{ project_input_path }}/.local_repo_credentials_key" +omnia_credentials_yaml_path: "{{ project_input_path }}/omnia_config_credentials.yml" +omnia_credentials_vault_path: "{{ project_input_path }}/.omnia_config_credentials_key" clean_rpms: true rpm_dir_path: "{{ repo_store_path }}/offline_repo/cluster/{{ item }}/rhel/9.6/rpm" local_repo_py_module_vars: diff --git a/local_repo/roles/validation/tasks/main.yml b/local_repo/roles/validation/tasks/main.yml index 75c64a58b1..945893d997 100644 --- a/local_repo/roles/validation/tasks/main.yml +++ b/local_repo/roles/validation/tasks/main.yml @@ -59,6 +59,8 @@ - name: Check user registry reachability check_user_registry: config_file: "{{ local_repo_config_file }}" + user_reg_cred_input: "{{ user_reg_cred_input }}" + user_reg_key_path: "{{ user_reg_key_path }}" timeout: "{{ time_out }}" register: registry_check_result diff --git a/local_repo/roles/validation/vars/main.yml b/local_repo/roles/validation/vars/main.yml index a0d7c019c6..fe174a217b 100644 --- 
a/local_repo/roles/validation/vars/main.yml +++ b/local_repo/roles/validation/vars/main.yml @@ -56,6 +56,8 @@ kubeadm_package_name: "kubeadm-v1.31.4" # Usage: main.yml nfs_shared_path: "/opt/omnia" local_repo_config_file: "{{ project_input_path }}/local_repo_config.yml" +user_reg_cred_input: "{{ project_input_path }}/user_registry_credential.yml" +user_reg_key_path: "{{ project_input_path }}/.local_repo_credentials_key" var_mount_percentage_limit: 80 var_mount_overuse_msg: | [WARNING] local_repo.yml may fail as /var mount usage has exceeded the limit of {{ var_mount_percentage_limit }}%. diff --git a/scheduler/job_based_user_access.yml b/scheduler/job_based_user_access.yml deleted file mode 100644 index f5719feed1..0000000000 --- a/scheduler/job_based_user_access.yml +++ /dev/null @@ -1,42 +0,0 @@ -# Copyright 2024 Dell Inc. or its subsidiaries. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. ---- - -# This playbook is used to configure slurm job based user access in compute nodes -# The inventory queried in the below command is to be created by the user prior to running `omnia.yml`. 
-# Command to execute: ansible-playbook job_based_user_access.yml -i inventory - -- name: Update Inventory with ansible_host information - ansible.builtin.import_playbook: ../utils/servicetag_host_mapping.yml - when: not ( hostvars['127.0.0.1']['update_inventory_executed'] | default(false) | bool ) - -- name: Include omnia_config.yml variables - hosts: localhost - connection: local - tasks: - - name: Validate scheduler software config input parameters - ansible.builtin.include_role: - name: cluster_validation - tasks_from: fetch_software_config.yml - - - name: Validate scheduler omnia config input parameters - ansible.builtin.include_role: - name: cluster_validation - tasks_from: fetch_omnia_inputs.yml - -- name: Setup slurm pam authentication - hosts: slurm_control_node, slurm_node, login - gather_facts: true - roles: - - slurm_pam diff --git a/scheduler/roles/slurm/tasks/main.yml b/scheduler/roles/slurm/tasks/main.yml index 6789fdf627..f247581112 100644 --- a/scheduler/roles/slurm/tasks/main.yml +++ b/scheduler/roles/slurm/tasks/main.yml @@ -59,3 +59,7 @@ - name: Fresh cluster install ansible.builtin.include_tasks: new_install.yml when: not slurmctld_status + +- name: Slurm pam + ansible.builtin.include_tasks: slurm_pam.yml + when: enable_slurm_pam | bool diff --git a/scheduler/roles/slurm_pam/tasks/slurm_pam_compute.yml b/scheduler/roles/slurm/tasks/slurm_pam.yml similarity index 51% rename from scheduler/roles/slurm_pam/tasks/slurm_pam_compute.yml rename to scheduler/roles/slurm/tasks/slurm_pam.yml index 33561d9f13..e39914a9b8 100644 --- a/scheduler/roles/slurm_pam/tasks/slurm_pam_compute.yml +++ b/scheduler/roles/slurm/tasks/slurm_pam.yml @@ -1,4 +1,4 @@ -# Copyright 2023 Dell Inc. or its subsidiaries. All Rights Reserved. +# Copyright 2025 Dell Inc. or its subsidiaries. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
@@ -11,33 +11,32 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. ---- +# Install slurm_pam +- name: Install compute packages + ansible.builtin.package: + name: "{{ slurm_pam_packages[ansible_os_family] }}" + state: present + when: + - '"login" in group_names or "slurm_node" in group_names' + +# Compute node - name: Modify sshd file for slurm pam configuration ansible.builtin.lineinfile: path: "{{ pam_sshd_path }}" insertafter: "{{ pam_sshd_regexp }}" line: "{{ pam_sshd_config_compute }}" + when: '"slurm_node" in group_names' -- name: Remove pam_systemd.so line in common-session - ansible.builtin.lineinfile: - path: "{{ common_session_file_path }}" - regexp: "{{ pam_systemd_regexp }}" - state: absent - when: - - ansible_distribution | lower == "ubuntu" - - ansible_distribution_version == "24.04" - -- name: Remove pam_systemd.so line in password-auth - ansible.builtin.lineinfile: - path: "{{ password_auth_file_path }}" - regexp: "{{ pam_systemd_regexp }}" - state: absent -- name: Start slurmd on compute nodes - ansible.builtin.systemd: - name: slurmd.service - state: restarted - enabled: true - register: slurmd_status - when: '"slurm_node" in group_names' +# Slurm_pam needs slurm.conf in the default path /etc/slurm +# Creating symlink for nfs_share mode +- name: Create a symbolic link for slurm conf (login node) + ansible.builtin.file: + src: "{{ slurm_share_prefix }}{{ slurm_config_dir }}/slurm.conf" + dest: "/{{ slurm_config_dir }}/slurm.conf" + state: link + force: true + when: + - slurm_installation_type == "nfs_share" + - '"login" in group_names or "slurm_node" in group_names' diff --git a/scheduler/roles/slurm/vars/main.yml b/scheduler/roles/slurm/vars/main.yml index 0e128fe1bc..f136002b62 100644 --- a/scheduler/roles/slurm/vars/main.yml +++ b/scheduler/roles/slurm/vars/main.yml @@ -104,3 +104,12 @@ utils_packages_file: "{{ 
input_project_dir }}/config/{{ software_config.cluster_ path_edit_msg: "Editing the bashrc failed, please manually source the /etc/environment for slurm" slurm_support_msg: "Slurm is not added in software_config, hence skipping slurm deployment" share_unavailable_msg: "Slurm install type is nfs_share, but share_path not available, hence skipping slurm deployment" + +# Usage: slurm-pam +enable_slurm_pam: true +pam_sshd_path: /etc/pam.d/sshd +pam_sshd_regexp: "^account required" +pam_sshd_config_compute: "account required pam_slurm_adopt.so action_no_jobs=deny" +slurm_pam_packages: + RedHat: + - slurm-pam_slurm diff --git a/scheduler/roles/slurm_pam/tasks/main.yml b/scheduler/roles/slurm_pam/tasks/main.yml deleted file mode 100644 index 21dfe50d70..0000000000 --- a/scheduler/roles/slurm_pam/tasks/main.yml +++ /dev/null @@ -1,40 +0,0 @@ -# Copyright 2023 Dell Inc. or its subsidiaries. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
---- -- name: Tasks for slurm_pam role - when: hostvars['127.0.0.1']['slurm_support'] - block: - - name: Load local_repo_access.yml file - ansible.builtin.include_vars: "{{ local_repo_access_dest_path }}" - - - name: Append share_path to variables when slurm_installation_type is nfs_share - ansible.builtin.set_fact: - slurm_confpth: "{{ hostvars['127.0.0.1']['share_path'] }}{{ slurm_folder }}{{ slurm_conf_file_pth }}" - when: slurm_installation_type == 'nfs_share' - - - name: Install slurm-pam_slurm - ansible.builtin.package: - name: slurm-pam_slurm - state: present - when: ansible_distribution | lower is in ['redhat', 'rocky'] - - - name: Install libpam-slurm-adopt - ansible.builtin.package: - name: libpam-slurm-adopt - state: present - when: ansible_distribution | lower == 'ubuntu' - - - name: Configure slurm pam for Slurm nodes - ansible.builtin.include_tasks: slurm_pam_compute.yml - when: inventory_hostname in groups['slurm_node'] diff --git a/scheduler/roles/slurm_pam/vars/main.yml b/scheduler/roles/slurm_pam/vars/main.yml deleted file mode 100644 index e17e7ef74d..0000000000 --- a/scheduler/roles/slurm_pam/vars/main.yml +++ /dev/null @@ -1,44 +0,0 @@ -# Copyright 2023 Dell Inc. or its subsidiaries. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
---- - -# Usage: main.yml -slurm_control_node_group_fail_msg: "Slurm control node group should contain exactly 1 node" -slurm_control_node_group_success_msg: "Slurm control node group check passed" -slurm_node_group_fail_msg: "Slurm node group should contain atleast 1 node" -slurm_node_group_success_msg: "Slurm node group check passed" -dir_permission: "0755" -slurm_conf_file_pth: "/etc/slurm/slurm.conf" -slurm_dir: "/etc/slurm" - -# Usage: slurm_pam_compute.yml -pam_sshd_path: /etc/pam.d/sshd -pam_sshd_regexp: "^account required" -pam_sshd_config_compute: "account required pam_slurm_adopt.so action_no_jobs=deny" -common_session_file_path: "/etc/pam.d/common-session" -password_auth_file_path: "/etc/pam.d/password-auth" -pam_systemd_regexp: '\s*pam_systemd.so' - -# Usage: slurm_pam_login_node.yml -pam_sshd_config_login: "account sufficient pam_slurm_adopt.so action_no_jobs=deny" - -# Usage: slurm_pam_manager.yml -pam_sshd_config_manager: "account sufficient pam_slurm_adopt.so action_no_jobs=deny" -local_repo_access_dest_path: "/opt/omnia/provision/local_repo_access.yml" -slurm_installation_type: "{{ hostvars['localhost']['slurm_installation_type'] }}" -slurm_share_path: "{{ hostvars['localhost']['share_path'] }}" -slurm_folder: "/slurm" -share_path_empty_failure_msg: "share_path cannot be empty , please provide a proper path in omnia_config.yml" -slurm_installation_type_empty_failure_msg: "Slurm Installation type cannot be empty in omnia_config.yml" -slurm_installation_type_wrong_failure_msg: "Slurm Installation Type should be either nfs_share or configless in omnia_config.yml" diff --git a/scheduler/scheduler.yml b/scheduler/scheduler.yml index bb258d5de7..53dcb4f7a0 100644 --- a/scheduler/scheduler.yml +++ b/scheduler/scheduler.yml @@ -159,12 +159,6 @@ roles: - slurm -- name: Install Slurm_pam - hosts: slurm_node,login - any_errors_fatal: true - roles: - - slurm_pam - - name: Compile and install the ucx and openmpi on the nfs share of compute nodes hosts: 
slurm_control_node, kube_control_plane gather_facts: true diff --git a/utils/roles/oim_cleanup/oim_container_cleanup/tasks/main.yml b/utils/roles/oim_cleanup/oim_container_cleanup/tasks/main.yml index 90d268947f..ea1cd53fe9 100644 --- a/utils/roles/oim_cleanup/oim_container_cleanup/tasks/main.yml +++ b/utils/roles/oim_cleanup/oim_container_cleanup/tasks/main.yml @@ -13,6 +13,10 @@ # limitations under the License. --- +- name: Define project input path + ansible.builtin.set_fact: + project_input_path: "{{ hostvars['localhost']['input_project_dir'] }}" + - name: Ensure firewalld is installed and running ansible.builtin.dnf: name: firewalld diff --git a/utils/roles/oim_cleanup/oim_container_cleanup/vars/main.yml b/utils/roles/oim_cleanup/oim_container_cleanup/vars/main.yml index ad62a9fcf0..de13c33750 100644 --- a/utils/roles/oim_cleanup/oim_container_cleanup/vars/main.yml +++ b/utils/roles/oim_cleanup/oim_container_cleanup/vars/main.yml @@ -43,7 +43,7 @@ pulp_cleanup_directory: - "{{ omnia_nfs_share }}/log/pulp" - "{{ omnia_nfs_share }}/pulp/settings" - "{{ omnia_nfs_share }}/pulp/nginx" - - "{{ omnia_nfs_share }}/input/project_default/.local_repo_credentials_key" + - "{{ project_input_path }}/.local_repo_credentials_key" - "{{ omnia_nfs_share }}/offline_repo" - "{{ omnia_nfs_share }}/log/local_repo" - "{{ omnia_nfs_share }}/k8s_dynamic_json" diff --git a/utils/roles/oim_cleanup/oim_helper_node_cleanup/tasks/disable_helper_node_boot.yml b/utils/roles/oim_cleanup/oim_helper_node_cleanup/tasks/disable_helper_node_boot.yml index 989a70bc88..1ff5d6f720 100644 --- a/utils/roles/oim_cleanup/oim_helper_node_cleanup/tasks/disable_helper_node_boot.yml +++ b/utils/roles/oim_cleanup/oim_helper_node_cleanup/tasks/disable_helper_node_boot.yml @@ -94,6 +94,9 @@ no_log: true rescue: - - name: Fail if boot mode setting or boot source disable failed - ansible.builtin.fail: + - name: Warn if boot mode or boot source disable failed, ignore and proceed + ansible.builtin.debug: msg: "{{ 
bootmode_failure_msg_template }}" + + - name: Continue play without failing + ansible.builtin.meta: clear_host_errors