diff --git a/pyproject.toml b/pyproject.toml index e92e5b9..d82a441 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -40,6 +40,8 @@ Source = "https://github.com/aiidaplugins/aiida-epw" 'epw.base' = 'aiida_epw.workflows.base:EpwBaseWorkChain' 'epw.epw_prep' = 'aiida_epw.workflows.prep:EpwPrepWorkChain' 'epw.supercon' = 'aiida_epw.workflows.supercon:SuperConWorkChain' +'epw.mobility' = 'aiida_epw.workflows.mobility:MobilityWorkChain' + [project.optional-dependencies] docs = [ diff --git a/src/aiida_epw/calculations/epw.py b/src/aiida_epw/calculations/epw.py index c860c2a..fab3bfd 100644 --- a/src/aiida_epw/calculations/epw.py +++ b/src/aiida_epw/calculations/epw.py @@ -48,6 +48,7 @@ class EpwCalculation(CalcJob): _output_phbands_file = "phband.freq" _FOLDER_SAVE = "save" _FOLDER_DYNAMICAL_MATRIX = "DYN_MAT" + _MAX_NSTEMP = 1000 # Not using symlink in pw to allow multiple nscf to run on top of the same scf _default_symlink_usage = False @@ -103,6 +104,12 @@ def define(cls, spec): required=False, help=("The script to convert the chk file to a ukk file"), ) + spec.input( + "quadrupole_dir", + valid_type=(orm.Str, orm.RemoteData), + required=False, + help=("RemoteData containing the quadrupole.fmt file, or its absolute path string."), + ) spec.inputs["metadata"]["options"]["parser_name"].default = "epw.epw" @@ -299,6 +306,22 @@ def test_offset(offset): ) ) + if 'quadrupole_dir' in self.inputs: + source_path = self.inputs.quadrupole_dir.value + + code = self.inputs.epw.code + + remote_list.append( + ( + code.computer.uuid, + Path( + source_path, ".quadrupole.fmt" + ).as_posix(), + "quadrupole.fmt", + ) + ) + + if "parent_folder_epw" in self.inputs: parent_folder_epw = self.inputs.parent_folder_epw if isinstance(parent_folder_epw, orm.RemoteStashFolderData): @@ -314,7 +337,14 @@ def test_offset(offset): "selecq.fmt", "crystal.fmt", "epwdata.fmt", - vme_fmt_dict[parameters["INPUTEPW"]["vme"]], + "dmedata.fmt", + "vmedata.fmt", + "quadrupole.fmt", + "decay.H", + 
"decay.dynmat", + "decay.epmate", + "decay.epmatp", + # vme_fmt_dict[parameters["INPUTEPW"]["vme"]], f"{self._PREFIX}.kgmap", f"{self._PREFIX}.kmap", f"{self._PREFIX}.ukk", diff --git a/src/aiida_epw/tools/parsers.py b/src/aiida_epw/tools/parsers.py index 34f2a84..92ef998 100644 --- a/src/aiida_epw/tools/parsers.py +++ b/src/aiida_epw/tools/parsers.py @@ -1,16 +1,87 @@ -"""Manual parsing functions for post-processing.""" +"""Manual parsing functions for post-processing. These functions can either be used independently or as helper functions of the `EpwParser` class. +It would be good to always have a `parsed_data` dictionary as an output.""" import numpy +import re +import io Ry2eV = 13.605662285137 +### This parse function is used for parsing the `band.eig` and `phband.freq` files. +def parse_epw_bands(file_content): + """Parse the contents of a band structure file generated by EPW software.""" + parsed_data = {} + + nbnd, nks = ( + int(v) + for v in re.search(r"&plot nbnd=\s+(\d+), nks=\s+(\d+)", file_content).groups() + ) + kpt_pattern = re.compile(r"\s([\s-][\d\.]+)" * 3) + band_pattern = re.compile(r"\s+([-\d\.]+)" * nbnd) + + kpoints = [] + bands = [] + + for number, line in enumerate(file_content.splitlines()): + match_kpt = re.search(kpt_pattern, line) + if match_kpt and number % 2 == 1: + kpoints.append(list(match_kpt.groups())) + + match_band = re.search(band_pattern, line) + if match_band and number % 2 == 0: + bands.append(list(match_band.groups())) + + parsed_data["kpoints"] = numpy.array(kpoints, dtype=float) + parsed_data["bands"] = numpy.array(bands, dtype=float) + + return parsed_data + + +def parse_epw_eldos(file_content): + """Parse the contents of the `.dos` file. + parameters: + file_content: the content of the `.dos` file as a string. + returns: + parsed_data: a dictionary containing the energy, the electronic density of states and the integrated density of states. 
+ """ + parsed_data = {} + + dos = numpy.loadtxt(io.StringIO(file_content), dtype=float, comments="#") + + parsed_data["energy"] = dos[:, 0] + parsed_data["edos"] = dos[:, 1] + parsed_data["integrated_dos"] = dos[:, 2] + return parsed_data + + +def parse_epw_phdos(file_content): + """Parse the contents of the `.phdos` file. + parameters: + file_content: the content of the `.phdos` file as a string. + returns: + parsed_data: a dictionary containing the frequency, the phonon density of states. Note that there are multiple phonon density of states for different smearing. + """ + parsed_data = {} + + phdos = numpy.loadtxt(io.StringIO(file_content), dtype=float, skiprows=1) + parsed_data["frequency"] = phdos[:, 0] + parsed_data["phdos"] = phdos[:, 1:] + return parsed_data + + def parse_epw_a2f(file_content): + """Parse the contents of the `.a2f` file. + parameters: + file_content: the content of the `.a2f` file as a string. + returns: + parsed_data: a dictionary containing the frequency, the spectral function for different smearing, and metadata such as electron smearing, Fermi window and summed el-ph coupling strength. 
+ """ parsed_data = {} a2f, footer = file_content.split("\n Integrated el-ph coupling") - a2f_array = numpy.array([line.split() for line in a2f.split("\n")], dtype=float) + a2f_array = numpy.array([line.split() for line in a2f.split("\n")[1:]], dtype=float) parsed_data["frequency"] = a2f_array[:, 0] parsed_data["a2f"] = a2f_array[:, 1:] @@ -31,3 +102,117 @@ def parse_epw_a2f(file_content): parsed_data[property] = float(line.split()[-1]) return parsed_data + + +def parse_epw_max_eigenvalue(file_content): + """Parse the max_eigenvalue part of the `stdout` file when solving the linearized Eliashberg equation.""" + parsed_data = {} + re_pattern = re.compile(r"\s+([\d\.]+)\s+([\d\.-]+)\s+\d+\s+[\d\.]+\s+\d+\n") + parsing_block = file_content.split( + "Finish: Solving (isotropic) linearized Eliashberg" + )[0] + + parsed_data["max_eigenvalue"] = numpy.array( + re_pattern.findall(parsing_block), dtype=float + ) + return parsed_data + + +def parse_epw_imag_iso(file_contents, prefix="aiida"): + """Parse the isotropic gap functions from EPW isotropic Eliashberg equation calculation. + parameters: + folder: the folder containing the `imag_iso` files. When serving as a helper function, it can take a `Retrieved` folder from aiida . + When used independently, it can take a local folder. + prefix: the prefix of the `imag_iso` files. + returns: + parsed_data: a dictionary containing the isotropic gap functions of numpy array type and the corresponding temperatures as keys. 
+ """ + parsed_data = {} + pattern_iso = re.compile(rf"^{prefix}\.imag_iso_(\d{{3}}\.\d{{2}})$") + + for filename, file_content in file_contents.items(): + match = pattern_iso.match(filename) + if match: + T = float(match.group(1)) + gap_function = numpy.loadtxt( + io.StringIO(file_content), dtype=float, comments="#", skiprows=1 + ) + parsed_data[T] = gap_function + return parsed_data + + +def parse_epw_imag_aniso_gap0(file_contents, prefix="aiida"): + """Parse the anisotropic gap functions from EPW anisotropic Eliashberg equation calculation. + parameters: + file_contents: a dictionary containing the file contents with filename as keys. + prefix: the prefix of the `imag_aniso_gap0` files. + returns: + parsed_data: a sorted dictionary containing the anisotropic gap functions of numpy array type and the corresponding temperatures as keys. + """ + parsed_data = {} + pattern_aniso_gap0 = re.compile(rf"^{prefix}\.imag_aniso_gap0_(\d{{3}}\.\d{{2}})$") + + for filename, file_content in file_contents.items(): + match = pattern_aniso_gap0.match(filename) + if match: + T = float(match.group(1)) + gap_function = numpy.loadtxt( + io.StringIO(file_content), dtype=float, comments="#", skiprows=1 + ) + parsed_data[T] = gap_function + return parsed_data + + +@staticmethod +def parse_stdout(stdout, logs): + """Parse the ``stdout``.""" + + def parse_max_eigenvalue(stdout_block): + re_pattern = re.compile( + r"\s+([\d\.]+)\s+([\d\.-]+)\s+\d+\s+[\d\.]+\s+\d+\n" + ) + parsing_block = stdout_block.split( + "Finish: Solving (isotropic) linearized Eliashberg" + )[0] + max_eigenvalue_array = orm.XyData() + max_eigenvalue_array.set_array( + "max_eigenvalue", + numpy.array(re_pattern.findall(parsing_block), dtype=float), + ) + return max_eigenvalue_array + + data_type_regex = ( + ( + "allen_dynes", + float, + re.compile(r"\s+Estimated Allen-Dynes Tc =\s+([\d\.]+) K"), + ), + ( + "fermi_energy_coarse", + float, + re.compile(r"\s+Fermi energy coarse grid =\s+([\d\.-]+)\seV"), + ), + ) + 
data_block_marker_parser = ( + ( + "max_eigenvalue", + "Superconducting transition temp. Tc", + parse_max_eigenvalue, + ), + ) + parsed_data = {} + stdout_lines = stdout.split("\n") + + for line_number, line in enumerate(stdout_lines): + for data_key, type, re_pattern in data_type_regex: + match = re.search(re_pattern, line) + if match: + parsed_data[data_key] = type(match.group(1)) + + for data_key, data_marker, block_parser in data_block_marker_parser: + if data_marker in line: + parsed_data[data_key] = block_parser(stdout[line_number:]) + + return parsed_data, logs + + diff --git a/src/aiida_epw/workflows/mobility.py b/src/aiida_epw/workflows/mobility.py new file mode 100644 index 0000000..8ba598f --- /dev/null +++ b/src/aiida_epw/workflows/mobility.py @@ -0,0 +1,474 @@ +"""Work chain for computing the critical temperature based on an `EpwWorkChain`.""" + +from scipy.interpolate import interp1d + +from aiida import orm +from aiida.common import AttributeDict +from aiida.engine import WorkChain, ToContext, while_, if_, append_ + +from aiida_quantumespresso.workflows.protocols.utils import ProtocolMixin + +from aiida_epw.workflows.base import EpwBaseWorkChain + +from aiida.engine import calcfunction + + +@calcfunction +def stash_to_remote(stash_data: orm.RemoteStashFolderData) -> orm.RemoteData: + """Convert a ``RemoteStashFolderData`` into a ``RemoteData``.""" + + if stash_data.get_attribute("stash_mode") != "copy": + raise NotImplementedError("Only the `copy` stash mode is supported.") + + remote_data = orm.RemoteData() + remote_data.set_attribute( + "remote_path", stash_data.get_attribute("target_basepath") + ) + remote_data.computer = stash_data.computer + + return remote_data + + +@calcfunction +def split_list(list_node: orm.List) -> dict: + return {f"el_{no}": orm.Float(el) for no, el in enumerate(list_node.get_list())} + + +class MobilityWorkChain(ProtocolMixin, WorkChain): + """ + Workchain to compute carrier mobility by converging results with respect to 
EPW interpolation. + + This workchain runs a series of `EpwBaseWorkChain` calculations to converge the + carrier mobility with respect to the fine k-point and q-point mesh density used for + electron-phonon interpolation. Once converged, it runs a final `EpwBaseWorkChain` + to obtain the final mobility values. + """ + + @classmethod + def define(cls, spec): + """Define the work chain specification.""" + super().define(spec) + + spec.input("structure", valid_type=orm.StructureData) + spec.input( + "clean_workdir", valid_type=orm.Bool, default=lambda: orm.Bool(False) + ) + spec.input( + "parent_folder_epw", valid_type=(orm.RemoteData, orm.RemoteStashFolderData) + ) + spec.input("interpolation_distance", valid_type=(orm.Float, orm.List)) + spec.input("convergence_threshold", valid_type=orm.Float, required=False) + spec.input( + "always_run_final", valid_type=orm.Bool, default=lambda: orm.Bool(False) + ) + spec.input( + "quadruple_dir", valid_type=orm.Str, # orm.RemoteData, orm.RemoteStashFolderData) + required=False, + help="Absolute path to the quadrupole.fmt file." + ) + + # spec.input( + # "quadruple", + # valid_type=orm.XyData, + # required=False, + # help="The contents of the `quadruple.mft` file for the e-ph interpolation.", + # ) + + # spec.expose_inputs( + # EpwBaseWorkChain, + # namespace="epw_interp", + # exclude=( + # "clean_workdir", + # "parent_folder_ph", + # "parent_folder_nscf", + # "parent_folder_chk", + # "qfpoints", + # "kfpoints", + # ), + # namespace_options={ + # "help": "Inputs for the interpolation `EpwBaseWorkChain`s." + # }, + # ) + + spec.expose_inputs( + EpwBaseWorkChain, + namespace="epw_mobility", + exclude=( + "clean_workdir", + "parent_folder_ph", + "parent_folder_nscf", + "parent_folder_chk", + + ), + namespace_options={ + "help": "Inputs for the carrier mobility `EpwBaseWorkChain`." 
+ }, + ) + + spec.outline( + cls.setup, + #cls.generate_reciprocal_points, + while_(cls.should_run_conv_test)( + cls.run_conv, + cls.inspect_conv, + ), + cls.results, + ) + spec.output( + "parameters", + valid_type=orm.Dict, + help="The `output_parameters` output node of the final EPW calculation.", + ) + #spec.output( + # "carrier_mobility", + # valid_type=orm.XyData, + # help="The carrier mobility of both iBTE and SERTA calculations from EPW.", + #) + + spec.output( + "carrier_mobility", + valid_type=orm.Dict, + help="The carrier mobility of both iterative iBTE and SERTA calculations from EPW.", + ) + + spec.exit_code( + 401, + "ERROR_SUB_PROCESS_EPW_INTERP", + message="The interpolation `EpwBaseWorkChain` sub process failed", + ) + spec.exit_code( + 402, + "ERROR_MOBILITY_NOT_CONVERGED", + message="Carrier mobility is not converged.", + ) + spec.exit_code( + 403, + "ERROR_NOT_SUFFICIENT_SPACE", + message="The space for running the calculation is not sufficient.", + ) + spec.exit_code( + 404, + "ERROR_EPW_INTERPOLATION", + message="The interpolated bands or e-ph matrix from epw is not available.", + ) + + @classmethod + def get_protocol_filepath(cls): + """Return ``pathlib.Path`` to the ``.yaml`` file that defines the protocols.""" + from importlib_resources import files + from . import protocols + + return files(protocols) / "mobility.yaml" + + @classmethod + def get_builder_from_protocol( + cls, + epw_code, + parent_epw, + protocol=None, + overrides=None, + scon_epw_code=None, + parent_folder_epw=None, + **kwargs, + ): + """Return a builder prepopulated with inputs selected according to the chosen protocol. 
+ + :TODO: + """ + inputs = cls.get_protocol_inputs(protocol, overrides) + + builder = cls.get_builder() + + if parent_epw.process_label == "EpwPrepWorkChain": + epw_source = ( + parent_epw.base.links.get_outgoing(link_label_filter="epw_base") + .first() + .node + ) + elif parent_epw.process_label == "EpwCalculation": + epw_source = parent_epw + elif parent_epw.process_label == "EpwBaseWorkChain": + epw_source = parent_epw + else: + raise ValueError(f"Invalid parent_epw process: {parent_epw.process_label}") + + if parent_folder_epw is None: + if ( + epw_source.inputs.epw.code.computer.hostname + != epw_code.computer.hostname + ): + raise ValueError( + "The `epw_code` must be configured on the same computer as that where the `parent_epw` was run." + ) + parent_folder_epw = parent_epw.outputs.epw_folder + else: + # TODO: Add check to make sure parent_folder_epw is on same computer as epw_code + pass + + # for epw_namespace in ("epw_mobility","epw_interpo"): + epw_namespace = "epw_mobility" + if epw_namespace == "epw_mobility": + epw_inputs = inputs.get(epw_namespace, None) + + epw_builder = EpwBaseWorkChain.get_builder_from_protocol( + code=epw_code, + structure=epw_source.caller.caller.inputs.structure, + protocol=protocol, + overrides=epw_inputs, + ) + + + epw_builder.code = epw_code + + epw_builder.kpoints = epw_source.inputs.kpoints + epw_builder.qpoints = epw_source.inputs.qpoints + if epw_inputs and "settings" in epw_inputs: + epw_builder.settings = orm.Dict(epw_inputs["settings"]) + # if "settings" in epw_inputs: + # epw_builder.settings = orm.Dict(epw_inputs["settings"]) + + builder[epw_namespace] = epw_builder + + if isinstance(inputs["interpolation_distance"], float): + builder.interpolation_distance = orm.Float(inputs["interpolation_distance"]) + if isinstance(inputs["interpolation_distance"], list): + builder.interpolation_distance = orm.List(inputs["interpolation_distance"]) + + builder.convergence_threshold = orm.Float(inputs["convergence_threshold"]) + 
builder.structure = epw_source.caller.caller.inputs.structure + builder.parent_folder_epw = parent_folder_epw + # builder.qpoints_distance = orm.Float(inputs["qpoints_distance"]) + # builder.kpoints_distance_scf = orm.Float(inputs["kpoints_distance_scf"]) + # builder.kpoints_factor_nscf = orm.Int(inputs["kpoints_factor_nscf"]) + builder.clean_workdir = orm.Bool(inputs["clean_workdir"]) + + return builder + + + def generate_reciprocal_points(self): + """Generate the qpoints and kpoints meshes for the `ph.x` and `pw.x` calculations.""" + + inputs = { + "structure": self.inputs.structure, + "distance": self.inputs.qpoints_distance, + "force_parity": self.inputs.get("kpoints_force_parity", orm.Bool(False)), + "metadata": {"call_link_label": "create_qpoints_from_distance"}, + } + qpoints = create_kpoints_from_distance(**inputs) # pylint: disable=unexpected-keyword-arg + inputs = { + "structure": self.inputs.structure, + "distance": self.inputs.kpoints_distance_scf, + "force_parity": self.inputs.get("kpoints_force_parity", orm.Bool(False)), + "metadata": {"call_link_label": "create_kpoints_scf_from_distance"}, + } + kpoints_scf = create_kpoints_from_distance(**inputs) + + qpoints_mesh = qpoints.get_kpoints_mesh()[0] + kpoints_nscf = orm.KpointsData() + kpoints_nscf.set_kpoints_mesh( + [v * self.inputs.kpoints_factor_nscf.value for v in qpoints_mesh] + ) + + self.ctx.qpoints = qpoints + self.ctx.kpoints_scf = kpoints_scf + self.ctx.kpoints_nscf = kpoints_nscf + + + def setup(self): + """Setup steps, i.e. 
initialise context variables.""" + intp = self.inputs.get("interpolation_distance") + if isinstance(intp, orm.List): + self.ctx.interpolation_list = list(split_list(intp).values()) + else: + self.ctx.interpolation_list = [intp] + + self.ctx.interpolation_list.sort() + self.ctx.iteration = 0 + self.ctx.final_interp = None + self.ctx.mobility_values = [] + self.ctx.is_converged = False + self.ctx.degaussq = None + + def should_run_conv_test(self): + """Return True if there are still distances left in the list to test.""" + # 只要列表不为空,就返回 True,继续进入 run_conv + return len(self.ctx.interpolation_list) > 0 + + + def should_run_conv(self): + """Check if the convergence loop should continue or not.""" + if "convergence_threshold" in self.inputs: + try: + # Need at least 2 runs to check for convergence + if len(self.ctx.epw_interp) < 2: + return True + + prev_mobility = self.ctx.epw_interp[-2].outputs.output_parameters.get_dict().get("mobility_iBTE") + new_mobility = self.ctx.epw_interp[-1].outputs.output_parameters.get_dict().get("mobility_iBTE") + + if prev_mobility is None or new_mobility is None: + self.report("Could not retrieve mobility from one of the previous calculations.") + return True # Continue running, maybe it will appear in the next one + + # Check for convergence, assuming mobility is a dictionary with values to compare. + # This part might need adjustment depending on the exact structure of `mobility_iBTE`. + # For simplicity, let's assume it's a single value for now. + is_converged = abs(prev_mobility - new_mobility) < self.inputs.convergence_threshold.value + self.ctx.is_converged = is_converged + self.report(f"Checking convergence: old={prev_mobility:.4f}, new={new_mobility:.4f} -> Converged: {is_converged}") + + except (AttributeError, IndexError, KeyError) as e: + self.report(f"Not enough data to check convergence, continuing. 
Error: {e}") + + if not self.ctx.is_converged and not self.ctx.interpolation_list: + # Ran out of distances to try + if self.inputs.always_run_final: + self.report("Mobility not converged, but running final calculation as requested.") + else: + return self.exit_codes.ERROR_MOBILITY_NOT_CONVERGED + + else: + self.report("No `convergence_threshold` input provided, convergence is assumed.") + self.ctx.is_converged = True + + return not self.ctx.is_converged and self.ctx.interpolation_list + + + def run_conv(self): + """Run the EpwBaseWorkChain in interpolation mode for the current interpolation distance.""" + self.ctx.iteration += 1 + + inputs = AttributeDict(self.exposed_inputs(EpwBaseWorkChain, namespace="epw_mobility")) + inputs.parent_folder_epw = self.inputs.parent_folder_epw + inputs.qfpoints_distance = self.ctx.interpolation_list.pop(0) # Take the smallest distance first + + # === 处理 Quadrupole 文件的软链接 (ln -s) === + # 假设你的 WorkChain 输入里有名为 'quadruple_dir' 的 Str 类型的绝对路径 + if 'quadruple_dir' in self.inputs: + source_path = self.inputs.quadruple_dir.value + + # 获取 EPW 计算所用的计算机 UUID + # 注意:EpwBaseWorkChain 通常把计算的 inputs 放在 'epw' 命名空间下 + # 确保 inputs.epw.code 存在。如果你的 structure 不一样,请调整这里获取 computer 的方式 + computer = inputs.epw.code.computer + + # 准备 symlink 配置: [(uuid, 远程绝对路径, 目标文件名)] + symlink_item = (computer.uuid, source_path, 'quadrupole.fmt') + + # 确保字典路径存在,然后赋值 + if 'metadata' not in inputs.epw: + inputs.epw.metadata = {} + if 'options' not in inputs.epw.metadata: + inputs.epw.metadata.options = {} + + # 设置 remote_symlink_list + # 这会让 AiiDA 在计算开始前执行 ln -s /path/to/quadrupole.fmt ./quadrupole.fmt + inputs.epw.metadata.options['remote_symlink_list'] = [symlink_item] + # =============================================== + + if 'kfpoints_factor' in self.inputs: + inputs.kfpoints_factor = self.inputs.kfpoints_factor + # 选项 B: 或者是硬编码一个默认值 (比如 1.0 表示和粗网格一样,或者更高的倍数) + else: + inputs.kfpoints_factor = int(1.0) + + + if self.ctx.degaussq: + parameters = 
inputs.parameters.get_dict() + parameters.setdefault("INPUTEPW", {})["degaussq"] = self.ctx.degaussq + inputs.parameters = orm.Dict(parameters) + + inputs.metadata.call_link_label = f"conv_{self.ctx.iteration:02d}" + running = self.submit(EpwBaseWorkChain, **inputs) + self.report(f"Launched EpwBaseWorkChain<{running.pk}> for convergence run #{self.ctx.iteration}") + + return ToContext(epw_interp=append_(running)) + + def inspect_conv(self): + """Verify that the EpwBaseWorkChain in interpolation mode finished successfully.""" + workchain = self.ctx.epw_interp[-1] + + if not workchain.is_finished_ok: + self.report(f"Convergence EpwBaseWorkChain<{workchain.pk}> failed with exit status {workchain.exit_status}") + return self.exit_codes.ERROR_SUB_PROCESS_EPW_INTERP + + try: + mobility = workchain.outputs.output_parameters.get_dict().get("mobility_iBTE", "N/A") + self.report(f"Convergence run #{self.ctx.iteration} finished with mobility: {mobility}") + self.ctx.mobility_values.append(mobility) + + # Set degaussq from the first successful run if not already set + if self.ctx.degaussq is None and 'a2f' in workchain.outputs: + frequency = workchain.outputs.a2f.get_array("frequency") + self.ctx.degaussq = frequency[-1] / 100 + self.report(f"Set degaussq for subsequent runs to {self.ctx.degaussq}") + + except KeyError: + self.report("Could not find 'mobility_iBTE' in the output parameters of the convergence run.") + + + def should_run_final(self): + """Check if the final EpwBaseWorkChain should be run.""" + return self.ctx.is_converged or self.inputs.always_run_final.value + + def run_final_epw_mobility(self): + """Run the final EpwBaseWorkChain in mobility mode.""" + inputs = AttributeDict(self.exposed_inputs(EpwBaseWorkChain, namespace="epw_mobility")) + + # Use the remote folder from the last successful convergence run as the parent + parent_folder_epw = self.ctx.epw_interp[-1].outputs.remote_folder + inputs.parent_folder_epw = parent_folder_epw + inputs.kfpoints = 
parent_folder_epw.creator.inputs.kfpoints + inputs.qfpoints = parent_folder_epw.creator.inputs.qfpoints + + if self.ctx.degaussq: + parameters = inputs.parameters.get_dict() + parameters.setdefault("INPUTEPW", {})["degaussq"] = self.ctx.degaussq + inputs.parameters = orm.Dict(parameters) + + inputs.metadata.call_link_label = "final_mobility_run" + running = self.submit(EpwBaseWorkChain, **inputs) + self.report(f"Launched final EpwBaseWorkChain<{running.pk}> for mobility calculation.") + + return ToContext(final_epw_mobility=running) + + + def inspect_final_epw_mobility(self): + """Verify that the final EpwBaseWorkChain for mobility finished successfully.""" + workchain = self.ctx.final_epw_mobility + + if not workchain.is_finished_ok: + self.report(f"Final mobility EpwBaseWorkChain<{workchain.pk}> failed with exit status {workchain.exit_status}") + return self.exit_codes.ERROR_EPW_INTERPOLATION + + + def results(self): + """Attach the final results to the outputs.""" + self.report("Workchain finished successfully, attaching final outputs.") +# final_results = self.ctx.final_epw_mobility.outputs.output_parameters + # self.out("parameters", final_results) + # The mobility data is expected to be within the main output parameters Dict + # self.out("carrier_mobility", final_results) + + def on_terminated(self): + """Clean the working directories of all child calculations if `clean_workdir=True` in the inputs.""" + super().on_terminated() + + if self.inputs.clean_workdir.value is False: + self.report("remote folders will not be cleaned") + return + + cleaned_calcs = [] + + for called_descendant in self.node.called_descendants: + if isinstance(called_descendant, orm.CalcJobNode): + try: + called_descendant.outputs.remote_folder._clean() # pylint: disable=protected-access + cleaned_calcs.append(called_descendant.pk) + except (IOError, OSError, KeyError): + pass + + if cleaned_calcs: + self.report( + f"cleaned remote folders of calculations: {' '.join(map(str, 
cleaned_calcs))}" + ) diff --git a/src/aiida_epw/workflows/prep.py b/src/aiida_epw/workflows/prep.py index 5dcdca7..93f1486 100644 --- a/src/aiida_epw/workflows/prep.py +++ b/src/aiida_epw/workflows/prep.py @@ -5,7 +5,7 @@ from aiida import orm from aiida.common import AttributeDict -from aiida.engine import WorkChain, ToContext +from aiida.engine import WorkChain, ToContext, if_ from aiida_quantumespresso.workflows.ph.base import PhBaseWorkChain from aiida_quantumespresso.workflows.protocols.utils import ProtocolMixin @@ -94,14 +94,29 @@ def define(cls, spec): "qfpoints", "qfpoints_distance", "kfpoints_factor", - "parent_folder_ph", + # "parent_folder_ph", "parent_folder_nscf", "parent_folder_epw", "parent_folder_chk", ), namespace_options={"help": "Inputs for the `EpwBaseWorkChain`."}, ) - + spec.expose_inputs( + EpwBaseWorkChain, + namespace="epw_bands", + exclude=( + "structure", + "clean_workdir", + "kpoints", + "qpoints", + "kfpoints", + "qfpoints", + "qfpoints_distance", + "kfpoints_factor", + "parent_folder_epw", + ), + namespace_options={"help": "Inputs namespace for `EpwBaseWorkChain` that runs the `epw.x` calculation in interpolation mode, i.e. 
the interpolated electron and phonon band structures."}, + ) spec.output("retrieved", valid_type=orm.FolderData) spec.output("epw_folder", valid_type=orm.RemoteStashFolderData) @@ -113,6 +128,10 @@ def define(cls, spec): cls.inspect_ph, cls.run_epw, cls.inspect_epw, + if_(cls.should_run_epw_bands)( + cls.run_epw_bands, + cls.inspect_epw_bands, + ), cls.results, ) spec.exit_code( @@ -130,7 +149,11 @@ def define(cls, spec): "ERROR_SUB_PROCESS_FAILED_EPW", message="The `EpwWorkChain` sub process failed", ) - + spec.exit_code( + 406, + "ERROR_SUB_PROCESS_FAILED_EPW_BANDS", + message="The `EpwBaseWorkChain` sub process failed", + ) @classmethod def get_protocol_filepath(cls): """Return ``pathlib.Path`` to the ``.yaml`` file that defines the protocols.""" @@ -166,7 +189,9 @@ def get_builder_from_protocol( builder.structure = structure w90_bands_inputs = inputs.get("w90_bands", {}) - pseudo_family = w90_bands_inputs.pop("pseudo_family", None) + pseudo_family = inputs.pop("pseudo_family", None) + + if wannier_projection_type == WannierProjectionType.ATOMIC_PROJECTORS_QE: if reference_bands is None: @@ -212,9 +237,7 @@ def get_builder_from_protocol( builder.ph_base = ph_base # TODO: Here I have a loop for the epw builders for future extension of another epw bands interpolation - for namespace in [ - "epw_base", - ]: + for namespace in ["epw_base", "epw_bands"]: epw_inputs = inputs.get(namespace, None) if namespace == "epw_base": if "target_base" not in epw_inputs["metadata"]["options"]["stash"]: @@ -324,14 +347,34 @@ def run_ph(self): self.exposed_inputs(PhBaseWorkChain, namespace="ph_base") ) - scf_base_wc = ( - self.ctx.workchain_w90_bands.base.links.get_outgoing( - link_label_filter="scf" + # scf_base_wc = ( + # self.ctx.workchain_w90_bands.base.links.get_outgoing( + # link_label_filter="scf" + # ) + # .first() + # .node + # ) + # inputs.ph.parent_folder = scf_base_wc.outputs.remote_folder + + if 'parent_folder_ph' in self.inputs and 
self.inputs.parent_folder_ph.creator.process_label == "PhCalculation": + inputs.ph.parent_folder = self.inputs.parent_folder_ph + inputs.ph.qpoints = self.inputs.parent_folder_ph.creator.inputs.qpoints + else: + scf_base_wc = ( + self.ctx.workchain_w90_bands.base.links.get_outgoing( + link_label_filter="scf" + ) + .first() + .node ) - .first() - .node - ) - inputs.ph.parent_folder = scf_base_wc.outputs.remote_folder + inputs.ph.parent_folder = scf_base_wc.outputs.remote_folder + + inputs.qpoints = self.ctx.qpoints + + inputs.metadata.call_link_label = "ph_base" + workchain_node = self.submit(PhBaseWorkChain, **inputs) + self.report(f"launching PhBaseWorkChain<{workchain_node.pk}>") + inputs.qpoints = self.ctx.qpoints @@ -354,7 +397,7 @@ def inspect_ph(self): def run_epw(self): """Run the `EpwBaseWorkChain`.""" inputs = AttributeDict( - self.exposed_inputs(EpwBaseWorkChain), namespace="epw_base" + self.exposed_inputs(EpwBaseWorkChain, namespace="epw_base") ) # The EpwBaseWorkChain will take the parent folder of the previous @@ -402,6 +445,48 @@ def inspect_epw(self): f"EpwBaseWorkChain<{workchain.pk}> failed with exit status {workchain.exit_status}" ) return self.exit_codes.ERROR_SUB_PROCESS_FAILED_EPW + + def should_run_epw_bands(self): + """Check if the `EpwBaseWorkChain` should be run in bands mode.""" + return "epw_bands" in self.inputs + + def run_epw_bands(self): + """Run the `EpwBaseWorkChain` in bands mode.""" + inputs = AttributeDict( + self.exposed_inputs(EpwBaseWorkChain, namespace="epw_bands") + ) + if "bands_kpoints" in self.ctx.workchain_w90_bands.inputs: + bands_kpoints = self.ctx.workchain_w90_bands.inputs.bands_kpoints + else: + bands_kpoints = ( + self.ctx.workchain_w90_bands.base.links.get_outgoing( + link_label_filter="seekpath_structure_analysis" + ) + .first() + .node.outputs.explicit_kpoints + ) + + inputs.kpoints = self.ctx.kpoints_nscf + inputs.qpoints = self.ctx.qpoints + inputs.qfpoints = bands_kpoints + inputs.kfpoints = bands_kpoints + 
inputs.parent_folder_epw = self.ctx.workchain_epw.outputs.remote_folder + inputs.metadata.call_link_label = "epw_bands" + workchain_node = self.submit(EpwBaseWorkChain, **inputs) + self.report( + f"launching EpwBaseWorkChain<{workchain_node.pk}> in bands interpolation mode" + ) + + return {"workchain_epw_bands": workchain_node} + + def inspect_epw_bands(self): + """Verify that the `EpwBaseWorkChain` finished successfully.""" + workchain = self.ctx.workchain_epw_bands + if not workchain.is_finished_ok: + self.report( + f"EpwBaseWorkChain<{workchain.pk}> failed with exit status {workchain.exit_status}" + ) + return self.exit_codes.ERROR_SUB_PROCESS_FAILED_EPW_BANDS def results(self): """Add the most important results to the outputs of the work chain.""" @@ -430,3 +515,4 @@ def on_terminated(self): self.report( f"cleaned remote folders of calculations: {' '.join(map(str, cleaned_calcs))}" ) + diff --git a/src/aiida_epw/workflows/protocols/base_mob.yaml b/src/aiida_epw/workflows/protocols/base_mob.yaml new file mode 100644 index 0000000..cf14536 --- /dev/null +++ b/src/aiida_epw/workflows/protocols/base_mob.yaml @@ -0,0 +1,26 @@ +default_inputs: + clean_workdir: True + max_iterations: 5 + qfpoints_distance: 0.1 + kfpoints_factor: 2 + metadata: + options: + withmpi: True + parameters: + INPUTEPW: + temps: 300 + vme: 'dipole' + use_ws: True + +default_protocol: moderate +protocols: + moderate: + description: 'Protocol to perform a electron-phonon calculation at normal precision at moderate computational cost.' + precise: + description: 'Protocol to perform a electron-phonon calculation at high precision at higher computational cost.' + qfpoints_distance: 0.06 + kfpoints_factor: 2 + fast: + description: 'Protocol to perform a electron-phonon calculation at low precision at minimal computational cost for testing purposes.' 
+ qfpoints_distance: 0.2 + kfpoints_factor: 1 diff --git a/src/aiida_epw/workflows/protocols/mobility.yaml b/src/aiida_epw/workflows/protocols/mobility.yaml new file mode 100644 index 0000000..d1a6e7b --- /dev/null +++ b/src/aiida_epw/workflows/protocols/mobility.yaml @@ -0,0 +1,76 @@ +default_inputs: + clean_workdir: False + interpolation_distance: + - 0.5 + - 0.3 + - 0.2 + - 0.1 + - 0.08 + - 0.06 + - 0.05 + - 0.04 + - 0.03 + convergence_threshold: 0.5 + epw_mobility: + metadata: + options: + resources: + num_machines: 1 + max_wallclock_seconds: 86400 # Twenty-four hours + withmpi: True + parameters: + INPUTEPW: + elph: True + restart: true + restart_step: 1000 + etf_mem: 3 + # --- I/O Settings --- + epbwrite: false + epbread: false + epwwrite: false + epwread: true + epmatkqread: false + selecqread: false + vme: 'dipole' #'wannier' + use_ws: true + lpolar: true + mp_mesh_k: true + lifc: false + # --- Carrier & Physics --- + carrier: true + ncarrier: 1.0e+13 + nstemp: 1 + temps: 300 + fsthick: 0.35 # Window of 0.35 eV + degaussw: 0.0 + efermi_read: true + fermi_energy: 10.79385284 + # --- Scattering & Mobility (BTE) --- + scattering: true + scattering_serta: true + int_mob: false + iterative_bte: true + mob_maxiter: 300 + broyden_beta: 1.0 + # --- Self Energy & A2F (Disabled) --- + elecselfen: false + phonselfen: false + a2f: false + # --- Magnetic Field --- + bfieldx: 0.0 + bfieldy: 0.0 + bfieldz: 1.0e-10 + + wannierize: False + + +default_protocol: moderate +protocols: + moderate: + description: 'Protocol to perform an electron-phonon calculation at normal precision at moderate computational cost.' + precise: + description: 'Protocol to perform an electron-phonon calculation at high precision at higher computational cost.' + interpolation_distance: 0.05 + fast: + description: 'Protocol to perform an electron-phonon calculation at low precision at minimal computational cost for testing purposes.' 
+ interpolation_distance: 0.5 diff --git a/src/aiida_epw/workflows/protocols/prep.yaml b/src/aiida_epw/workflows/protocols/prep.yaml index 944537c..e2dbf09 100644 --- a/src/aiida_epw/workflows/protocols/prep.yaml +++ b/src/aiida_epw/workflows/protocols/prep.yaml @@ -1,5 +1,6 @@ default_inputs: clean_workdir: True + pseudo_family: 'PseudoDojo/0.5/PBE/SR/standard/upf' qpoints_distance: 0.5 kpoints_distance_scf: 0.15 kpoints_factor_nscf: 2 diff --git a/src/aiida_epw/workflows/protocols/prep_mob.yaml b/src/aiida_epw/workflows/protocols/prep_mob.yaml new file mode 100644 index 0000000..7afed25 --- /dev/null +++ b/src/aiida_epw/workflows/protocols/prep_mob.yaml @@ -0,0 +1,87 @@ +default_inputs: + clean_workdir: True + pseudo_family: 'PseudoDojo/0.5/PBE/SR/standard/upf' + qpoints_distance: 0.5 + kpoints_distance_scf: 0.15 + kpoints_factor_nscf: 2 + # To set for the NSCF step of the Wannier90 work chain + # One can either use `nscf` mode or `bands` mode. For the `nscf` mode, + # the `nosym` and `noinv` must be set to True to avoid pw.x automatically + # adding more (symmetry-equivalent) kpoints to the user-specified kpoints. + # For the `bands` mode, pw.x will use exactly what the user provided. + # In addition, it seems epw.x has a problem when restarting from a + # pw.x calculation with `nosym` and `noinv` set to True, therefore, we + # switch to the `bands` mode. 
+ w90_bands: + nscf: + pw: + parameters: + SYSTEM: + nosym: False + noinv: False + CONTROL: + calculation: bands + ph_base: + ph: + settings: + PREPARE_FOR_EPW: True + epw_base: + metadata: + options: + resources: + num_machines: 1 + max_wallclock_seconds: 43200 # Twelve hours + withmpi: True + stash: + source_list: + - crystal.fmt + - dmedata.fmt + - epwdata.fmt + - selecq.fmt + - vmedata.fmt + - aiida.kgmap + - aiida.kmap + - aiida.ukk + - out/aiida.epmatwp + - save + parameters: + INPUTEPW: + elph: True + epbread: False + epbwrite: True + epwread: False + epwwrite: True + fsthick: 100 + temps: 300 + vme: 'dipole' + wannierize: False + etf_mem: 1 + lpolar: True + + + epw_bands: + options: + resources: + num_machines: 1 + withmpi: True + parameters: + INPUTEPW: + band_plot: True + elph: True + epbread: False + epbwrite: False + epwread: True + epwwrite: False + wannierize: False + + +default_protocol: moderate +protocols: + moderate: + description: 'Protocol to perform an electron-phonon calculation at normal precision at moderate computational cost.' + precise: + description: 'Protocol to perform an electron-phonon calculation at high precision at higher computational cost.' + qpoints_distance: 0.3 + fast: + description: 'Protocol to perform an electron-phonon calculation at low precision at minimal computational cost for testing purposes.' + qpoints_distance: 1.1