Example #1
    def __init__(self, node):
        """Construct the Parser instance.

        :param node: the `CalcJobNode` that contains the results of the executed `CalcJob` process.
        """
        from aiida.common.log import AIIDA_LOGGER
        from aiida.orm.utils.log import create_logger_adapter

        self._logger = create_logger_adapter(AIIDA_LOGGER.getChild('parser').getChild(self.__class__.__name__), node)
        self._node = node
        # `extendeddicts` (which provides `AttributeDict`) is imported at module level in the full source file
        self._outputs = extendeddicts.AttributeDict()
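
A short usage sketch (hypothetical, not part of the original snippet): the adapter returned by `create_logger_adapter` behaves like a standard `logging.Logger`, and passing the node is intended to let AiiDA associate the emitted messages with that calculation node. Inside a Parser subclass it could be used along these lines:

    def parse(self, **kwargs):
        """Hypothetical parse method illustrating use of the logger created above."""
        self._logger.warning('output file was incomplete; parsing what is available')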
Example #2
""" Utility functions for export of AiiDA entities """
# pylint: disable=too-many-locals,too-many-branches,too-many-nested-blocks
from enum import Enum
import warnings

from aiida.orm import QueryBuilder, ProcessNode
from aiida.common.log import AIIDA_LOGGER, LOG_LEVEL_REPORT, override_log_formatter
from aiida.common.warnings import AiidaDeprecationWarning

from aiida.tools.importexport.common import exceptions
from aiida.tools.importexport.common.config import (
    file_fields_to_model_fields, entity_names_to_entities, get_all_fields_info)

EXPORT_LOGGER = AIIDA_LOGGER.getChild('export')
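
# Note (illustrative, not part of the original module): AIIDA_LOGGER is the root
# 'aiida' logger, so getChild('export') yields a child logger named 'aiida.export'
# that inherits the handlers and logging level configured for the 'aiida' namespace.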


class ExportFileFormat(str, Enum):
    """Export file formats"""
    ZIP = 'zip'
    TAR_GZIPPED = 'tar.gz'
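
# Illustrative check (not part of the original module): because ExportFileFormat also
# subclasses ``str``, its members compare equal to their plain string values, which is
# why string keys such as 'folder' can coexist with enum keys in get_reader (Example #5).
assert ExportFileFormat.ZIP == 'zip'
assert ExportFileFormat.TAR_GZIPPED == 'tar.gz'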


def fill_in_query(partial_query,
                  originating_entity_str,
                  current_entity_str,
                  tag_suffixes=None,
                  entity_separator='_'):
    """
    This function recursively constructs QueryBuilder queries that are needed
Example #3
""" Utility functions for import of AiiDA entities """
# pylint: disable=inconsistent-return-statements,too-many-branches
import os

import click
from tabulate import tabulate

from aiida.common.log import AIIDA_LOGGER, LOG_LEVEL_REPORT
from aiida.common.utils import get_new_uuid
from aiida.orm import QueryBuilder, Comment

from aiida.tools.importexport.common import exceptions

IMPORT_LOGGER = AIIDA_LOGGER.getChild('import')


def merge_comment(incoming_comment, comment_mode):
    """ Merge comment according comment_mode
    :return: New UUID if new Comment should be created, else None.
    """

    # Get incoming Comment's UUID, 'mtime', and 'content'
    incoming_uuid = str(incoming_comment['uuid'])
    incoming_mtime = incoming_comment['mtime']
    incoming_content = incoming_comment['content']

    # Compare modification time 'mtime'
    if comment_mode == 'newest':
        # Get existing Comment's 'mtime' and 'content'
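
For reference, a sketch of the mapping that merge_comment reads from; the keys are taken from the code above, while the values are hypothetical placeholders:

incoming_comment = {
    'uuid': '...',      # UUID of the incoming Comment (placeholder)
    'mtime': '...',     # modification time, compared when comment_mode == 'newest'
    'content': '...',   # the comment text
}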
Example #4
import copy
import dataclasses
import os
from pathlib import Path
import tarfile
from types import TracebackType
from typing import Any, Dict, List, Optional, Tuple, Type, Union
import zipfile

from aiida.common import json  # handles byte dumps
from aiida.common.log import AIIDA_LOGGER

__all__ = ('ArchiveMetadata', 'detect_archive_type', 'null_callback',
           'CacheFolder')

ARCHIVE_LOGGER = AIIDA_LOGGER.getChild('archive')


@dataclasses.dataclass
class ArchiveMetadata:
    """Class for storing metadata about this archive.

    Required fields are necessary for importing the data back into AiiDA,
    whereas optional fields capture information about the export/migration process(es).
    """
    export_version: str
    aiida_version: str
    # Entity type -> database ID key
    unique_identifiers: Dict[str, str] = dataclasses.field(repr=False)
    # Entity type -> database key -> meta parameters
    all_fields_info: Dict[str, Dict[str, Dict[str, str]]] = dataclasses.field(repr=False)
Example #5
from typing import Type, cast

from aiida.common.log import AIIDA_LOGGER
from aiida.tools.importexport.archive.common import (ArchiveMetadata,
                                                     null_callback)
from aiida.tools.importexport.common.config import ExportFileFormat, NODE_ENTITY_NAME, GROUP_ENTITY_NAME
from aiida.tools.importexport.common.utils import export_shard_uuid

__all__ = (
    'ArchiveReaderAbstract',
    'ARCHIVE_READER_LOGGER',
    'ReaderJsonBase',
    'ReaderJsonFolder',
    'ReaderJsonTar',
    'ReaderJsonZip',
    'get_reader',
)

ARCHIVE_READER_LOGGER = AIIDA_LOGGER.getChild('archive.reader')


def get_reader(file_format: str) -> Type['ArchiveReaderAbstract']:
    """Return the available writer classes."""
    readers = {
        ExportFileFormat.ZIP: ReaderJsonZip,
        ExportFileFormat.TAR_GZIPPED: ReaderJsonTar,
        'folder': ReaderJsonFolder,
    }

    if file_format not in readers:
        raise ValueError(
            f'Can only read in the formats: {tuple(readers.keys())}, please specify one for "file_format".'
        )

    return cast(Type[ArchiveReaderAbstract], readers[file_format])
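
A small usage sketch (illustrative; the concrete reader classes are defined later in the module and are not shown in this excerpt):

reader_cls = get_reader(ExportFileFormat.ZIP)  # the plain string 'zip' also works, since ExportFileFormat is a str Enum
assert issubclass(reader_cls, ArchiveReaderAbstract)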
Example #6
"""Functions to delete entities from the database, preserving provenance integrity."""
import logging
from typing import Callable, Iterable, Optional, Set, Tuple, Union
import warnings

from aiida.backends.utils import delete_nodes_and_connections
from aiida.common.log import AIIDA_LOGGER
from aiida.common.warnings import AiidaDeprecationWarning
from aiida.orm import Group, Node, QueryBuilder, load_node
from aiida.tools.graph.graph_traversers import get_nodes_delete

__all__ = ('DELETE_LOGGER', 'delete_nodes', 'delete_group_nodes')

DELETE_LOGGER = AIIDA_LOGGER.getChild('delete')


def delete_nodes(
    pks: Iterable[int],
    verbosity: Optional[int] = None,
    dry_run: Union[bool, Callable[[Set[int]], bool]] = True,
    force: Optional[bool] = None,
    **traversal_rules: bool
) -> Tuple[Set[int], bool]:
    """Delete nodes given a list of "starting" PKs.

    This function will delete not only the specified nodes, but also the ones that are
    linked to these and should also be deleted in order to keep the provenance consistent,
    according to the rules explained in the Topics - Provenance section of the documentation.
    In summary:
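
A minimal usage sketch with hypothetical PKs. Per the signature above, the call returns a set of PKs and a boolean flag, and with the default dry_run=True it should only report what would be deleted rather than removing anything:

pks, deleted = delete_nodes([1234, 5678], dry_run=True)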
Example #7
from typing import Type, cast

from archive_path import TarPath, ZipPath, read_file_in_tar, read_file_in_zip

from aiida.common.log import AIIDA_LOGGER
from aiida.common.progress_reporter import get_progress_reporter, create_callback
from aiida.tools.importexport.common.exceptions import (ArchiveMigrationError,
                                                        CorruptArchive,
                                                        DanglingLinkError)
from aiida.tools.importexport.common.config import ExportFileFormat
from aiida.tools.importexport.archive.common import CacheFolder
from aiida.tools.importexport.archive.migrations import MIGRATE_FUNCTIONS

__all__ = ('ArchiveMigratorAbstract', 'ArchiveMigratorJsonBase',
           'ArchiveMigratorJsonZip', 'ArchiveMigratorJsonTar',
           'MIGRATE_LOGGER', 'get_migrator')

MIGRATE_LOGGER = AIIDA_LOGGER.getChild('migrate')


def get_migrator(file_format: str) -> Type['ArchiveMigratorAbstract']:
    """Return the available archive migrator classes."""
    migrators = {
        ExportFileFormat.ZIP: ArchiveMigratorJsonZip,
        ExportFileFormat.TAR_GZIPPED: ArchiveMigratorJsonTar,
    }

    if file_format not in migrators:
        raise ValueError(
            f'Can only migrate in the formats: {tuple(migrators.keys())}, please specify one for "file_format".'
        )

    return cast(Type[ArchiveMigratorAbstract], migrators[file_format])
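
A usage sketch analogous to get_reader in Example #5 (illustrative only):

migrator_cls = get_migrator(ExportFileFormat.TAR_GZIPPED)
assert issubclass(migrator_cls, ArchiveMigratorAbstract)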