Example #1
0
def recollect_atomic_calculations(**kwargs):
    """
    Collect dynamical matrix files into a single folder, putting a different number at the end of
    each final dynamical matrix file, obtained from the input link, which corresponds to its place
    in the list of q-points originally generated by distribute_qpoints.

    :param kwargs: keys are the string representation of the hubbard atom index and the value is the
        corresponding retrieved folder object.
    :return: FolderData object containing the perturbation files of the computed HpBaseWorkChain
    """
    import os
    import errno

    output_folder_sub = HpCalculation._OUTPUT_SUBFOLDER
    output_folder_raw = HpCalculation._FOLDER_RAW
    # NOTE(review): _PREFIX appears to be set on instantiation (old AiiDA sets it in
    # _init_internal_params), so we keep the instance access rather than a class lookup.
    output_prefix = HpCalculation()._PREFIX

    # Initialize the merged folder, by creating the subdirectory for the perturbation files
    merged_folder = FolderData()
    folder_path = os.path.normpath(merged_folder.get_abs_path('.'))
    output_path = os.path.join(folder_path, output_folder_raw)

    # Create the raw output subdirectory; tolerate it already existing (EAFP, works on Py2 and Py3)
    try:
        os.makedirs(output_path)
    except OSError as error:
        if error.errno != errno.EEXIST or not os.path.isdir(output_path):
            raise

    # Copy each perturbation file into the merged folder, tagged with its atomic site index.
    # Use .items() instead of the Python-2-only .iteritems() so this runs on Python 3 as well.
    for atomic_site_index, retrieved_folder in kwargs.items():
        filepath = os.path.join(
            output_folder_raw,
            '{}.chi.pert_{}.dat'.format(output_prefix, atomic_site_index))
        merged_folder.add_path(retrieved_folder.get_abs_path(filepath), filepath)

    # TODO: currently the Hp code requires the .save folder that is written by the original
    # PwCalculation, for the final post-processing matrix collection step. It doesn't really need all
    # the information contained in that folder, and requiring it means, copying it from remote to a
    # local folder and then reuploading it to remote folder. This is unnecessarily heavy
    # Any retrieved folder will do for the shared files; take the first one.
    # next(iter(...)) works on both Python 2 and 3, unlike kwargs.values()[0].
    retrieved_folder = next(iter(kwargs.values()))

    dirpath = os.path.join(output_folder_sub, output_prefix + '.save')
    merged_folder.add_path(retrieved_folder.get_abs_path(dirpath), dirpath)

    filepath = os.path.join(output_folder_sub, output_prefix + '.occup')
    merged_folder.add_path(retrieved_folder.get_abs_path(filepath), filepath)

    return merged_folder
Example #2
0
def migrate_as_main(work_dir,
                    input_rel_path,
                    output_rel_path,
                    resources=None,
                    input_links=None):
    """ migrate existing CRYSTAL17 calculation as a WorkCalculation,
    which imitates a ``crystal17.main`` calculation

    :param work_dir: the absolute path to the directory to holding the files
    :param input_rel_path: relative path (from work_dir) to .d12 file
    :param output_rel_path: relative path (from work_dir) to .out file
    :param resources: a dict of of job resource parameters (not yet implemented)
    :param input_links: a dict of existing nodes to link inputs to (allowed keys: 'structure', 'settings', 'parameters')

    Example of input_links={'structure': {"cif_file": CifNode}},
    will create a link (via a workcalculation) from the CifNode to the input StructureData

    :raise IOError: if the work_dir or files do not exist
    :raises aiida.common.exceptions.ParsingError: if the input parsing fails
    :raises aiida.parsers.exceptions.OutputParsingError: if the output parsing fails

    :return: the calculation node
    :rtype: aiida.orm.WorkCalculation

    """
    from aiida.orm.data.folder import FolderData
    from aiida_crystal17.calculations.cry_main import CryMainCalculation
    from aiida_crystal17.parsers.cry_basic import CryBasicParser

    calc = CryMainCalculation()
    parser_cls = CryBasicParser

    # TODO optionally use transport to remote work directory
    # Resolve and validate all required paths up front.
    input_path = os.path.join(work_dir, input_rel_path)
    output_path = os.path.join(work_dir, output_rel_path)
    for path_name, path in (("work_dir", work_dir),
                            ("input_path", input_path),
                            ("output_path", output_path)):
        if not os.path.exists(path):
            raise IOError("{} doesn't exist: {}".format(path_name, path))

    if resources:
        raise NotImplementedError("saving resources to ImmigrantCalculation")
    # resources = {} if resources is None else resources

    inputs = create_inputs(input_path, output_path)

    # Re-parse the stored output file to recover the output nodes.
    psuccess, output_nodes = parse_mainout(output_path,
                                           parser_class=parser_cls.__name__,
                                           init_struct=inputs['structure'],
                                           init_settings=inputs['settings'])

    outparams = output_nodes.pop("parameters")
    perrors = outparams.get_attr("errors") + outparams.get_attr(
        "parser_warnings")

    if perrors or not psuccess:
        raise ParsingError(
            "the parser failed, raising the following errors:\n{}".format(
                "\n\t".join(perrors)))

    # Build a retrieved-style folder holding the raw input/output files.
    folder = FolderData()
    folder.add_path(input_path, calc._DEFAULT_INPUT_FILE)  # pylint: disable=protected-access
    folder.add_path(output_path, calc._DEFAULT_OUTPUT_FILE)  # pylint: disable=protected-access

    # create links from existing nodes to inputs
    for link_key, nodes_dict in (input_links or {}).items():
        _run_dummy_workchain(
            nodes_dict,
            {link_key: inputs[link_key]},
        )

    # Map the parsed inputs onto the calculation's expected link names.
    inputs_dict = {
        calc.get_linkname("parameters"): inputs['parameters'],
        calc.get_linkname("structure"): inputs['structure'],
        calc.get_linkname("settings"): inputs['settings']
    }
    inputs_dict.update({
        calc.get_linkname_basisset(element): basisset
        for element, basisset in inputs["basis"].items()
    })

    # Map the parsed outputs onto the parser's link names; anything left over is an error.
    outputs_dict = {parser_cls.get_linkname_outparams(): outparams}
    if "settings" in output_nodes:
        outputs_dict[parser_cls.get_linkname_outsettings()] = output_nodes.pop(
            "settings")
    if "structure" in output_nodes:
        outputs_dict[parser_cls.get_linkname_outstructure(
        )] = output_nodes.pop("structure")
    if output_nodes:
        raise ParsingError("unknown key(s) in output_nodes: {}".format(
            list(output_nodes.keys())))

    outputs_dict["retrieved"] = folder

    calcnode = _run_dummy_workchain(inputs_dict, outputs_dict,
                                    CryMainImmigrant)

    calcnode.label = "CryMainImmigrant"
    calcnode.description = "an immigrated CRYSTAL17 calculation into the {} format".format(
        calc.__class__)

    return calcnode