Example #1
0
def migrate_v8_to_v9(metadata, data, *args):  # pylint: disable=unused-argument
    """Bring an export archive from schema version 0.8 up to 0.9.

    :param metadata: the content of an export archive metadata.json file
    :param data: the content of an export archive data.json file
    """
    verify_metadata_version(metadata, '0.8')
    update_metadata(metadata, '0.9')

    # Single schema change in this version: group type strings
    migration_dbgroup_type_string(data)
Example #2
0
def migrate_v7_to_v8(metadata, data, *args):  # pylint: disable=unused-argument
    """Bring an export archive from schema version 0.7 up to 0.8.

    :param metadata: the content of an export archive metadata.json file
    :param data: the content of an export archive data.json file
    """
    verify_metadata_version(metadata, '0.7')
    update_metadata(metadata, '0.8')

    # Single schema change in this version: default link labels
    migration_default_link_label(data)
Example #3
0
def migrate_v6_to_v7(metadata, data, *args):  # pylint: disable=unused-argument
    """Bring an export archive from schema version 0.6 up to 0.7.

    :param metadata: the content of an export archive metadata.json file
    :param data: the content of an export archive data.json file
    """
    verify_metadata_version(metadata, '0.6')
    update_metadata(metadata, '0.7')

    # Migrate the data itself, then drop the now-obsolete attribute-link
    # entries from the metadata
    migration_data_migration_legacy_process_attributes(data)
    remove_attribute_link_metadata(metadata)
Example #4
0
def migrate_v5_to_v6(metadata, data, *args):  # pylint: disable=unused-argument
    """Bring an export archive from schema version 0.5 up to 0.6.

    :param metadata: the content of an export archive metadata.json file
    :param data: the content of an export archive data.json file
    """
    verify_metadata_version(metadata, '0.5')
    update_metadata(metadata, '0.6')

    # Serialize datetimes first, then migrate the legacy job-calculation data
    migration_serialize_datetime_objects(data)
    migration_migrate_legacy_job_calculation_data(data)
Example #5
0
def migrate_v1_to_v2(metadata, data, *args):
    """
    Migration of export files from v0.1 to v0.2, which means generalizing the
    field names with respect to the database backend

    :param metadata: the content of an export archive metadata.json file
    :param data: the content of an export archive data.json file
    """
    old_version = '0.1'
    new_version = '0.2'

    old_start = 'aiida.djsite'
    new_start = 'aiida.backends.djsite'

    verify_metadata_version(metadata, old_version)
    update_metadata(metadata, new_version)

    def _rewrite(name):
        """Swap the legacy module prefix for the new one, when present."""
        if not name.startswith(old_start):
            return name
        return '{}{}'.format(new_start, name[len(old_start):])

    def _rewrite_requires(obj):
        """Recursively rewrite 'requires' values that still carry the legacy prefix."""
        if not isinstance(obj, dict):
            return obj
        return {
            key: _rewrite(value) if key == 'requires' and value.startswith(old_start) else _rewrite_requires(value)
            for key, value in obj.items()
        }

    # Rename the top-level entity keys in data.json; iterate over a snapshot
    # of the keys since the dictionary is mutated during the loop
    for field in ['export_data']:
        for key in list(data[field]):
            if key.startswith(old_start):
                data[field][_rewrite(key)] = data[field].pop(key)

    # Same renaming for the entity keys in metadata.json
    for field in ['unique_identifiers', 'all_fields_info']:
        for key in list(metadata[field]):
            if key.startswith(old_start):
                metadata[field][_rewrite(key)] = metadata[field].pop(key)

    metadata['all_fields_info'] = _rewrite_requires(metadata['all_fields_info'])
Example #6
0
def migrate_v4_to_v5(metadata, data, *args):  # pylint: disable=unused-argument
    """Bring an export archive from schema version 0.4 up to 0.5.

    Corresponds to database migration 0034
    (drop_node_columns_nodeversion_public) and onwards.

    :param metadata: the content of an export archive metadata.json file
    :param data: the content of an export archive data.json file
    """
    verify_metadata_version(metadata, '0.4')
    update_metadata(metadata, '0.5')

    # Drop the obsolete node columns, then the computer transport params
    migration_drop_node_columns_nodeversion_public(metadata, data)
    migration_drop_computer_transport_params(metadata, data)
Example #7
0
def migrate_v2_to_v3(metadata, data, *args):
    """
    Migration of export files from v0.2 to v0.3, which means adding the link
    types to the link entries and making the entity key names backend agnostic
    by effectively removing the prefix 'aiida.backends.djsite.db.models'

    :param data: the content of an export archive data.json file
    :param metadata: the content of an export archive metadata.json file
    :raises DanglingLinkError: if a link references a node UUID that is not
        present in the export data
    """

    old_version = '0.2'
    new_version = '0.3'

    class LinkType(enum.Enum):  # pylint: disable=too-few-public-methods
        """This was the state of the `aiida.common.links.LinkType` enum before aiida-core v1.0.0a5"""

        UNSPECIFIED = 'unspecified'
        CREATE = 'createlink'
        RETURN = 'returnlink'
        INPUT = 'inputlink'
        CALL = 'calllink'

    class NodeType(enum.Enum):  # pylint: disable=too-few-public-methods
        """A simple enum of relevant node types"""

        NONE = 'none'
        CALC = 'calculation'
        CODE = 'code'
        DATA = 'data'
        WORK = 'work'

    # Map of old backend-specific model names to backend-agnostic entity names
    entity_map = {
        'aiida.backends.djsite.db.models.DbNode': 'Node',
        'aiida.backends.djsite.db.models.DbLink': 'Link',
        'aiida.backends.djsite.db.models.DbGroup': 'Group',
        'aiida.backends.djsite.db.models.DbComputer': 'Computer',
        # Fixed: this entry previously held the corrupted placeholder
        # '******'; every other value follows the `Db<Name>` -> `<Name>`
        # convention and other migrations in this file reference the 'User'
        # entity (e.g. the Comment 'requires': 'User' field).
        'aiida.backends.djsite.db.models.DbUser': 'User',
        'aiida.backends.djsite.db.models.DbAttribute': 'Attribute'
    }

    verify_metadata_version(metadata, old_version)
    update_metadata(metadata, new_version)

    # Create a mapping from node uuid to node type
    mapping = {}
    for nodes in data['export_data'].values():
        for node in nodes.values():

            try:
                node_uuid = node['uuid']
                node_type_string = node['type']
            except KeyError:
                # Entries without uuid/type carry no node-type information
                # and cannot participate in link typing
                continue

            # `str.startswith` accepts a tuple, so both calculation prefixes
            # are matched in a single call
            if node_type_string.startswith(('calculation.job.', 'calculation.inline.')):
                node_type = NodeType.CALC
            elif node_type_string.startswith('code.Code'):
                node_type = NodeType.CODE
            elif node_type_string.startswith('data.'):
                node_type = NodeType.DATA
            elif node_type_string.startswith('calculation.work.'):
                node_type = NodeType.WORK
            else:
                node_type = NodeType.NONE

            mapping[node_uuid] = node_type

    # For each link, deduce the link type and insert it in place
    for link in data['links_uuid']:

        try:
            # The mapping values are already `NodeType` members, so no
            # re-construction through `NodeType(...)` is needed
            input_type = mapping[link['input']]
            output_type = mapping[link['output']]
        except KeyError as exception:
            # Chain explicitly so the failed lookup remains visible in the
            # traceback of the domain-specific error
            raise DanglingLinkError('Unknown node UUID {} or {}'.format(
                link['input'], link['output'])) from exception

        # The following table demonstrates the logic for inferring the link type
        # (CODE, DATA) -> (WORK, CALC) : INPUT
        # (CALC)       -> (DATA)       : CREATE
        # (WORK)       -> (DATA)       : RETURN
        # (WORK)       -> (CALC, WORK) : CALL
        if input_type in [NodeType.CODE, NodeType.DATA
                          ] and output_type in [NodeType.CALC, NodeType.WORK]:
            link['type'] = LinkType.INPUT.value
        elif input_type == NodeType.CALC and output_type == NodeType.DATA:
            link['type'] = LinkType.CREATE.value
        elif input_type == NodeType.WORK and output_type == NodeType.DATA:
            link['type'] = LinkType.RETURN.value
        elif input_type == NodeType.WORK and output_type in [
                NodeType.CALC, NodeType.WORK
        ]:
            link['type'] = LinkType.CALL.value
        else:
            link['type'] = LinkType.UNSPECIFIED.value

    # Now we migrate the entity key names i.e. removing the 'aiida.backends.djsite.db.models' prefix
    for field in ['unique_identifiers', 'all_fields_info']:
        for old_key, new_key in entity_map.items():
            if old_key in metadata[field]:
                metadata[field][new_key] = metadata[field][old_key]
                del metadata[field][old_key]

    # Replace the 'requires' keys in the nested dictionaries in 'all_fields_info'
    for entity in metadata['all_fields_info'].values():
        for prop in entity.values():
            for key, value in prop.items():
                if key == 'requires' and value in entity_map:
                    prop[key] = entity_map[value]

    # Replace any present keys in the data.json
    for field in ['export_data']:
        for old_key, new_key in entity_map.items():
            if old_key in data[field]:
                data[field][new_key] = data[field][old_key]
                del data[field][old_key]
Example #8
0
def migrate_v3_to_v4(metadata, data, folder, *args):  # pylint: disable=unused-argument
    """
    Migration of export files from v0.3 to v0.4

    Note concerning migration 0032 - REV. 1.0.32:
    Remove legacy workflow tables: DbWorkflow, DbWorkflowData, DbWorkflowStep
    These were (according to Antimo Marrazzo) never exported.

    :param metadata: the content of an export archive metadata.json file
    :param data: the content of an export archive data.json file
    :param folder: archive folder handed to the trajectory-symbols migration,
        which is the only step here that takes it
    """
    old_version = '0.3'
    new_version = '0.4'

    verify_metadata_version(metadata, old_version)
    update_metadata(metadata, new_version)

    # Apply migrations in correct sequential order
    # NOTE(review): the ordering below is load-bearing — later steps
    # presumably consume the data shape produced by earlier ones; do not
    # reorder without checking the corresponding database migrations.
    migration_base_data_plugin_type_string(data)
    migration_process_type(metadata, data)
    migration_code_sub_class_of_data(data)
    migration_add_node_uuid_unique_constraint(data)
    migration_migrate_builtin_calculations(data)
    migration_provenance_redesign(data)
    migration_dbgroup_name_to_label_type_to_type_string(metadata, data)
    migration_dbgroup_type_string_change_content(data)
    migration_calc_job_option_attribute_keys(data)
    migration_move_data_within_node_module(data)
    migration_trajectory_symbols_to_attribute(data, folder)
    migration_remove_node_prefix(data)
    migration_rename_parameter_data_to_dict(data)
    migration_dbnode_type_to_dbnode_node_type(metadata, data)
    migration_remove_dbcomputer_enabled(metadata, data)
    migration_replace_text_field_with_json_field(data)

    # Add Node Extras
    add_extras(data)

    # Update metadata.json with the new Log and Comment entities
    # (field layout mirrors the existing entries in 'all_fields_info')
    new_entities = {
        'Log': {
            'uuid': {},
            'time': {
                'convert_type': 'date'
            },
            'loggername': {},
            'levelname': {},
            'message': {},
            'metadata': {},
            'dbnode': {
                'related_name': 'dblogs',
                'requires': 'Node'
            }
        },
        'Comment': {
            'uuid': {},
            'ctime': {
                'convert_type': 'date'
            },
            'mtime': {
                'convert_type': 'date'
            },
            'content': {},
            'dbnode': {
                'related_name': 'dbcomments',
                'requires': 'Node'
            },
            'user': {
                'related_name': 'dbcomments',
                'requires': 'User'
            }
        }
    }
    metadata['all_fields_info'].update(new_entities)
    metadata['unique_identifiers'].update({'Log': 'uuid', 'Comment': 'uuid'})