Example #1
    def get(self, *args, **kwargs):
        """GET request to return the file as attachment"""
        timeline = get_backend_api('timeline_backend')

        # Get File object
        try:
            file = File.objects.get(sodar_uuid=kwargs['file'])

        except File.DoesNotExist:
            messages.error(self.request, 'File object not found!')

            return redirect(
                reverse('filesfolders:list',
                        kwargs={'project': kwargs['project']}))

        # Get corresponding FileData object with file content
        try:
            file_data = FileData.objects.get(file_name=file.file.name)

        except FileData.DoesNotExist:
            messages.error(self.request, 'File data not found!')

            return redirect(
                reverse('filesfolders:list',
                        kwargs={'project': kwargs['project']}))

        # Open file for serving
        try:
            file_content = storage.open(file_data.file_name)

        except Exception as ex:
            print('{}'.format(ex))  # DEBUG

            messages.error(self.request, 'Error opening file!')

            return redirect(
                reverse('filesfolders:list',
                        kwargs={'project': kwargs['project']}))

        # Return file as attachment
        response = HttpResponse(FileWrapper(file_content),
                                content_type=file_data.content_type)

        if SERVE_AS_ATTACHMENT:
            response['Content-Disposition'] = 'attachment; filename={}'.format(
                file.name)

        if not self.request.user.is_anonymous:
            # Add event in Timeline
            if timeline:
                tl_event = timeline.add_event(
                    project=file.project,
                    app_name=APP_NAME,
                    user=self.request.user,
                    event_name='file_serve',
                    description='serve file {file}',
                    classified=True,
                    status_type='INFO',
                )
                tl_event.add_object(file, 'file', file.name)

        return response
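
The pattern above recurs throughout these examples: resolve the optional backend, treat None as "not enabled", and fill the {file} placeholder in the event description via add_object(). A condensed, hedged sketch of just that logging step (the wrapper function and its arguments are hypothetical; only the projectroles/timeline calls shown above are assumed):

from projectroles.plugins import get_backend_api

def log_file_serve(project, user, file_obj, app_name):
    """Hypothetical helper: record a classified 'file_serve' timeline event."""
    timeline = get_backend_api('timeline_backend')
    if not timeline:  # backend apps are optional; None means "not enabled"
        return None
    tl_event = timeline.add_event(
        project=project,
        app_name=app_name,
        user=user,
        event_name='file_serve',
        description='serve file {file}',  # '{file}' is rendered from the linked object
        classified=True,
        status_type='INFO',
    )
    # Link the File object so the placeholder refers to it
    tl_event.add_object(file_obj, 'file', file_obj.name)
    return tl_event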
Example #2
    def form_valid(self, form):
        """Override form_valid() for zip file unpacking"""
        timeline = get_backend_api('timeline_backend')

        ######################
        # Regular file upload
        ######################

        if not form.cleaned_data.get('unpack_archive'):
            return super().form_valid(form)

        #####################
        # Zip file unpacking
        #####################

        file = form.cleaned_data.get('file')
        folder = form.cleaned_data.get('folder')
        project = self.get_project(self.request, self.kwargs)

        # Build redirect URL
        # TODO: Repetition, put this in a mixin?
        if folder:
            re_kwargs = {'folder': folder.sodar_uuid}

        else:
            re_kwargs = {'project': project.sodar_uuid}

        redirect_url = reverse('filesfolders:list', kwargs=re_kwargs)

        try:
            zip_file = ZipFile(file)

        except Exception as ex:
            messages.error(self.request,
                           'Unable to extract zip file: {}'.format(ex))
            return redirect(redirect_url)

        new_folders = []
        new_files = []

        with transaction.atomic():
            for f in [f for f in zip_file.infolist() if not f.is_dir()]:
                # Create subfolders if any
                current_folder = folder

                for zip_folder in f.filename.split('/')[:-1]:
                    try:
                        current_folder = Folder.objects.get(
                            name=zip_folder,
                            project=project,
                            folder=current_folder,
                        )

                    except Folder.DoesNotExist:
                        current_folder = Folder.objects.create(
                            name=zip_folder,
                            project=project,
                            folder=current_folder,
                            owner=self.request.user,
                        )
                        new_folders.append(current_folder)

                # Save file
                file_name_nopath = f.filename.split('/')[-1]

                unpacked_file = File(
                    name=file_name_nopath,
                    project=project,
                    folder=current_folder,
                    owner=self.request.user,
                    secret=build_secret(),
                )
                content_file = ContentFile(zip_file.read(f.filename))
                unpacked_file.file.save(file_name_nopath, content_file)
                unpacked_file.save()
                new_files.append(unpacked_file)

        # Add timeline events
        for new_folder in new_folders:
            self._add_item_modify_event(obj=new_folder,
                                        request=self.request,
                                        view_action='create')

        for new_file in new_files:
            self._add_item_modify_event(obj=new_file,
                                        request=self.request,
                                        view_action='create')

        if timeline:
            timeline.add_event(
                project=project,
                app_name=APP_NAME,
                user=self.request.user,
                event_name='archive_extract',
                description='Extract from archive "{}", create {} folders '
                'and {} files'.format(file.name, len(new_folders),
                                      len(new_files)),
                extra_data={
                    'new_folders': [f.name for f in new_folders],
                    'new_files': [f.name for f in new_files],
                },
                status_type='OK',
            )

        messages.success(
            self.request,
            'Extracted {} files in folder "{}" from archive "{}"'.format(
                len([f for f in zip_file.infolist() if not f.is_dir()]),
                folder.name if folder else 'root',
                file.name,
            ),
        )
        return redirect(redirect_url)
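
The folder-creation loop above derives the folder chain from each archive member's path. A standalone sketch of that path-walking step, without the Django models (the function name and usage lines are illustrative only):

from zipfile import ZipFile

def walk_zip_members(path):
    """Yield (folder_chain, file_name) for every non-directory member."""
    with ZipFile(path) as zip_file:
        for info in zip_file.infolist():
            if info.is_dir():
                continue
            # 'a/b/c.txt' -> (['a', 'b'], 'c.txt'); in the view above each
            # folder is then looked up or created relative to its parent
            *folders, file_name = info.filename.split('/')
            yield folders, file_name

# Example usage:
# for folders, name in walk_zip_members('archive.zip'):
#     print(folders, name)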
Example #3
    def _finalize_edit(self, edit_count, target_folder, **kwargs):
        """Finalize executed batch operation"""
        timeline = get_backend_api('timeline_backend')

        edit_suffix = 's' if edit_count != 1 else ''
        fail_suffix = 's' if len(self.failed) != 1 else ''

        if len(self.failed) > 0:
            messages.warning(
                self.request,
                'Unable to edit {} item{}, check '
                'permissions and target folder! Failed: {}'.format(
                    len(self.failed),
                    fail_suffix,
                    ', '.join(f.name for f in self.failed),
                ),
            )

        if edit_count > 0:
            messages.success(
                self.request,
                'Batch {} {} item{}.'.format(
                    'deleted' if self.batch_action == 'delete' else 'moved',
                    edit_count,
                    edit_suffix,
                ),
            )

        # Add event in Timeline
        if timeline:
            extra_data = {
                'items': [x.name for x in self.items],
                'failed': [x.name for x in self.failed],
            }

            tl_event = timeline.add_event(
                project=Project.objects.filter(
                    sodar_uuid=self.project.sodar_uuid).first(),
                app_name=APP_NAME,
                user=self.request.user,
                event_name='batch_{}'.format(self.batch_action),
                description='batch {} {} item{} {} {}'.format(
                    self.batch_action,
                    edit_count,
                    edit_suffix,
                    '({} failed)'.format(len(self.failed))
                    if len(self.failed) > 0 else '',
                    'to {target_folder}'
                    if self.batch_action == 'move' and target_folder else '',
                ),
                extra_data=extra_data,
                status_type='OK' if edit_count > 0 else 'FAILED',
            )

            if self.batch_action == 'move' and target_folder:
                tl_event.add_object(target_folder, 'target_folder',
                                    target_folder.get_path())

        if 'folder' in kwargs:
            re_kwargs = {'folder': kwargs['folder']}

        else:
            re_kwargs = {'project': kwargs['project']}

        return redirect(reverse('filesfolders:list', kwargs=re_kwargs))
Example #4
def check_backend(name):
    """Return True if backend app is available, else False"""
    return bool(get_backend_api(name))
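
A minimal usage sketch for such a helper (the caller and dictionary keys are hypothetical); it simply gates optional features on backend availability:

from projectroles.plugins import get_backend_api

def check_backend(name):
    """Return True if backend app is available, else False"""
    return bool(get_backend_api(name))

# Hypothetical caller, e.g. for exposing flags to a template context
backend_flags = {
    'timeline_enabled': check_backend('timeline_backend'),
    'cache_enabled': check_backend('sodar_cache'),
}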
Example #5
    def setUp(self):
        super().setUp()
        self.cache_backend = get_backend_api('sodar_cache')
Example #6
"""Django command for rebuilding cohort statistics after import."""

import aldjemy
from django.contrib.auth import get_user_model
from django.core.exceptions import ObjectDoesNotExist
from django.core.management.base import BaseCommand, CommandError
from django.db import transaction
from django.conf import settings

from projectroles.models import Project
from projectroles.plugins import get_backend_api
from variants.variant_stats import rebuild_project_variant_stats
from variants.helpers import SQLALCHEMY_ENGINE

timeline = get_backend_api("timeline_backend")

#: The User model to use.
User = get_user_model()


class Command(BaseCommand):
    """Implementation of rebuilding project-wide statistics.

    All steps are executed in a transaction, so no stale state is used or left in the database.
    """

    #: Help message displayed on the command line.
    help = "Import case from PED file and varfish-annotator output."

    def add_arguments(self, parser):
        """Add the command's argument to the ``parser``."""
Example #7
class RemoteProjectAPI:
    """Remote project data handling API"""

    #: Remote data retrieved from source site
    remote_data = None

    #: Remote source site currently being worked with
    source_site = None

    #: Timeline API
    timeline = get_backend_api('timeline_backend')

    #: User for storing timeline events
    tl_user = None

    #: Default owner for projects
    default_owner = None

    #: Updated parent projects in current sync operation
    updated_parents = []

    # Internal functions -------------------------------------------------------

    @staticmethod
    def _update_obj(obj, data, fields):
        """Update object"""
        for f in [f for f in fields if hasattr(obj, f)]:
            setattr(obj, f, data[f])
        obj.save()
        return obj

    def _sync_user(self, uuid, u_data):
        """Synchronize LDAP user based on source site data"""
        # Update existing user
        try:
            user = User.objects.get(username=u_data['username'])
            updated_fields = []

            for k, v in u_data.items():
                if (k not in ['groups', 'sodar_uuid'] and hasattr(user, k)
                        and str(getattr(user, k)) != str(v)):
                    updated_fields.append(k)

            if updated_fields:
                user = self._update_obj(user, u_data, updated_fields)
                u_data['status'] = 'updated'

                logger.info('Updated user: {} ({}): {}'.format(
                    u_data['username'], uuid, ', '.join(updated_fields)))

            # Check and update groups
            if sorted([g.name for g in user.groups.all()]) != sorted(
                    u_data['groups']):
                for g in user.groups.all():
                    if g.name not in u_data['groups']:
                        g.user_set.remove(user)
                        logger.debug(
                            'Removed user {} ({}) from group "{}"'.format(
                                user.username, user.sodar_uuid, g.name))

                existing_groups = [g.name for g in user.groups.all()]

                for g in u_data['groups']:
                    if g not in existing_groups:
                        group, created = Group.objects.get_or_create(name=g)
                        group.user_set.add(user)
                        logger.debug('Added user {} ({}) to group "{}"'.format(
                            user.username, user.sodar_uuid, g))

        # Create new user
        except User.DoesNotExist:
            create_values = {k: v for k, v in u_data.items() if k != 'groups'}
            user = User.objects.create(**create_values)
            u_data['status'] = 'created'
            logger.info('Created user: {}'.format(user.username))

            for g in u_data['groups']:
                group, created = Group.objects.get_or_create(name=g)
                group.user_set.add(user)
                logger.debug('Added user {} ({}) to group "{}"'.format(
                    user.username, user.sodar_uuid, g))

    def _handle_user_error(self, error_msg, project, role_uuid):
        logger.error(error_msg)
        self.remote_data['projects'][str(
            project.sodar_uuid)]['roles'][role_uuid]['status'] = 'error'
        self.remote_data['projects'][str(
            project.sodar_uuid)]['roles'][role_uuid]['status_msg'] = error_msg

    def _handle_project_error(self, error_msg, uuid, p, action):
        """Add and log project error in remote sync"""
        logger.error('{} {} "{}" ({}): {}'.format(
            action.capitalize(),
            p['type'].lower(),
            p['title'],
            uuid,
            error_msg,
        ))
        self.remote_data['projects'][uuid]['status'] = 'error'
        self.remote_data['projects'][uuid]['status_msg'] = error_msg

    def _update_project(self, project, p_data, parent):
        """Update an existing project during sync"""
        updated_fields = []
        uuid = str(project.sodar_uuid)

        for k, v in p_data.items():
            if (k not in ['parent', 'sodar_uuid', 'roles', 'readme']
                    and hasattr(project, k)
                    and str(getattr(project, k)) != str(v)):
                updated_fields.append(k)

        # README is a special case
        if project.readme.raw != p_data['readme']:
            updated_fields.append('readme')

        if updated_fields or project.parent != parent:
            project = self._update_obj(project, p_data, updated_fields)

            # Manually update parent
            if parent != project.parent:
                project.parent = parent
                project.save()
                updated_fields.append('parent')

            self.remote_data['projects'][uuid]['status'] = 'updated'

            if self.tl_user:  # Taskflow
                tl_desc = ('update project from remote site '
                           '"{{{}}}" ({})'.format('site',
                                                  ', '.join(updated_fields)))
                # TODO: Add extra_data
                tl_event = self.timeline.add_event(
                    project=project,
                    app_name=APP_NAME,
                    user=self.tl_user,
                    event_name='remote_project_update',
                    description=tl_desc,
                    status_type='OK',
                )
                tl_event.add_object(self.source_site, 'site',
                                    self.source_site.name)

            logger.info('Updated {}: {}'.format(
                p_data['type'].lower(), ', '.join(sorted(updated_fields))))

        else:
            logger.debug('Nothing to update in project details')

    def _create_project(self, uuid, p_data, parent):
        """Create a new project from source site data"""

        # Check existing title under the same parent
        try:
            old_project = Project.objects.get(parent=parent,
                                              title=p_data['title'])

            # Handle error
            error_msg = ('{} with the title "{}" exists under the same '
                         'parent, unable to create'.format(
                             old_project.type.capitalize(), old_project.title))
            self._handle_project_error(error_msg, uuid, p_data, 'create')
            return

        except Project.DoesNotExist:
            pass

        create_fields = ['title', 'description', 'readme']
        create_values = {k: v for k, v in p_data.items() if k in create_fields}
        create_values['type'] = p_data['type']
        create_values['parent'] = parent
        create_values['sodar_uuid'] = uuid
        project = Project.objects.create(**create_values)

        self.remote_data['projects'][uuid]['status'] = 'created'

        if self.tl_user:  # Taskflow
            tl_event = self.timeline.add_event(
                project=project,
                app_name=APP_NAME,
                user=self.tl_user,
                event_name='remote_project_create',
                description='create project from remote site {site}',
                status_type='OK',
            )
            # TODO: Add extra_data
            tl_event.add_object(self.source_site, 'site',
                                self.source_site.name)

        logger.info('Created {}'.format(p_data['type'].lower()))

    def _update_roles(self, project, p_data):
        """Create or update project roles"""
        # TODO: Refactor this
        uuid = str(project.sodar_uuid)

        allow_local = getattr(settings, 'PROJECTROLES_ALLOW_LOCAL_USERS', False)

        for r_uuid, r in {k: v for k, v in p_data['roles'].items()}.items():
            # Ensure the Role exists
            try:
                role = Role.objects.get(name=r['role'])

            except Role.DoesNotExist:
                error_msg = 'Role object "{}" not found (assignment {})'.format(
                    r['role'], r_uuid)
                self._handle_user_error(error_msg, project, r_uuid)
                continue

            # Ensure the user is valid
            if ('@' not in r['user'] and not allow_local
                    and r['role'] != PROJECT_ROLE_OWNER):
                error_msg = ('Local user "{}" set for role "{}" but local '
                             'users are not allowed'.format(
                                 r['user'], r['role']))
                self._handle_user_error(error_msg, project, r_uuid)
                continue

            # If local user, ensure they exist
            elif ('@' not in r['user'] and allow_local
                  and r['role'] != PROJECT_ROLE_OWNER
                  and not User.objects.filter(username=r['user']).first()):
                error_msg = ('Local user "{}" not found, role of "{}" will '
                             'not be assigned'.format(r['user'], r['role']))
                self._handle_user_error(error_msg, project, r_uuid)
                continue

            # Use the default owner, if owner role for a non-LDAP user and local
            # users are not allowed
            if (r['role'] == PROJECT_ROLE_OWNER and
                (not allow_local
                 or not User.objects.filter(username=r['user']).first())
                    and '@' not in r['user']):
                role_user = self.default_owner

                # Notify of assigning role to default owner
                status_msg = (
                    'Non-LDAP/AD user "{}" set as owner, assigning role '
                    'to user "{}"'.format(r['user'],
                                          self.default_owner.username))
                self.remote_data['projects'][uuid]['roles'][r_uuid][
                    'user'] = self.default_owner.username
                self.remote_data['projects'][uuid]['roles'][r_uuid][
                    'status_msg'] = status_msg
                logger.info(status_msg)

            else:
                role_user = User.objects.get(username=r['user'])

            # Update RoleAssignment if it exists and is changed
            as_updated = False
            role_query = {'project__sodar_uuid': project.sodar_uuid}

            if r['role'] == PROJECT_ROLE_OWNER:
                role_query['role__name'] = PROJECT_ROLE_OWNER

            else:
                role_query['user'] = role_user

            old_as = RoleAssignment.objects.filter(**role_query).first()

            # Owner updating
            if old_as and r['role'] == PROJECT_ROLE_OWNER:
                # Update user or local admin user
                if ('@' in r['user'] and old_as.user != role_user) or (
                        role_user == self.default_owner
                        and project.get_owner().user != self.default_owner):
                    as_updated = True

                    # Delete existing role of the new owner if it exists
                    try:
                        RoleAssignment.objects.get(
                            project__sodar_uuid=project.sodar_uuid,
                            user=role_user,
                        ).delete()
                        logger.debug('Deleted existing role from '
                                     'user "{}"'.format(role_user.username))

                    except RoleAssignment.DoesNotExist:
                        logger.debug(
                            'No existing role found for user "{}"'.format(
                                role_user.username))

            # Updating of other roles
            elif (old_as and r['role'] != PROJECT_ROLE_OWNER
                  and old_as.role != role):
                as_updated = True

            if as_updated:
                old_as.role = role
                old_as.user = role_user
                old_as.save()
                self.remote_data['projects'][str(
                    project.sodar_uuid)]['roles'][r_uuid]['status'] = 'updated'

                if self.tl_user:  # Taskflow
                    tl_desc = ('update role to "{}" for {{{}}} from site '
                               '{{{}}}'.format(role.name, 'user', 'site'))
                    tl_event = self.timeline.add_event(
                        project=project,
                        app_name=APP_NAME,
                        user=self.tl_user,
                        event_name='remote_role_update',
                        description=tl_desc,
                        status_type='OK',
                    )
                    tl_event.add_object(role_user, 'user', role_user.username)
                    tl_event.add_object(self.source_site, 'site',
                                        self.source_site.name)

                logger.info('Updated role {}: {} = {}'.format(
                    r_uuid, role_user.username, role.name))

            # Create a new RoleAssignment
            elif not old_as:
                role_values = {
                    'sodar_uuid': r_uuid,
                    'project': project,
                    'role': role,
                    'user': role_user,
                }
                RoleAssignment.objects.create(**role_values)

                self.remote_data['projects'][str(
                    project.sodar_uuid)]['roles'][r_uuid]['status'] = 'created'

                if self.tl_user:  # Taskflow
                    tl_desc = 'add role "{}" for {{{}}} from site {{{}}}'.format(
                        role.name, 'user', 'site')
                    tl_event = self.timeline.add_event(
                        project=project,
                        app_name=APP_NAME,
                        user=self.tl_user,
                        event_name='remote_role_create',
                        description=tl_desc,
                        status_type='OK',
                    )
                    tl_event.add_object(role_user, 'user', role_user.username)
                    tl_event.add_object(self.source_site, 'site',
                                        self.source_site.name)

                logger.info('Created role {}: {} -> {}'.format(
                    r_uuid, role_user.username, role.name))

    def _remove_deleted_roles(self, project, p_data):
        """Remove roles for project deleted in source site"""
        timeline = get_backend_api('timeline_backend')
        uuid = str(project.sodar_uuid)
        current_users = [v['user'] for k, v in p_data['roles'].items()]

        deleted_roles = (RoleAssignment.objects.filter(
            project=project).exclude(role__name=PROJECT_ROLE_OWNER).exclude(
                user__username__in=current_users))
        deleted_count = deleted_roles.count()

        if deleted_count > 0:
            deleted_users = sorted([r.user.username for r in deleted_roles])

            for del_as in deleted_roles:
                del_user = del_as.user
                del_role = del_as.role
                del_uuid = str(del_as.sodar_uuid)
                del_as.delete()

                self.remote_data['projects'][uuid]['roles'][del_uuid] = {
                    'user': del_user.username,
                    'role': del_role.name,
                    'status': 'deleted',
                }

                if self.tl_user:  # Taskflow
                    tl_desc = ('remove role "{}" from {{{}}} by site '
                               '{{{}}}'.format(del_role.name, 'user', 'site'))
                    tl_event = timeline.add_event(
                        project=project,
                        app_name=APP_NAME,
                        user=self.tl_user,
                        event_name='remote_role_delete',
                        description=tl_desc,
                        status_type='OK',
                    )
                    tl_event.add_object(del_user, 'user', del_user.username)
                    tl_event.add_object(self.source_site, 'site',
                                        self.source_site.name)

            logger.info('Deleted {} removed role{} for: {}'.format(
                deleted_count,
                's' if deleted_count != 1 else '',
                ', '.join(deleted_users),
            ))

    def _sync_project(self, uuid, p_data):
        """Synchronize a project from source site. Create/update project, its
        parents and user roles"""
        # Add/update parents if not yet handled
        if (p_data['parent_uuid']
                and p_data['parent_uuid'] not in self.updated_parents):
            c_data = self.remote_data['projects'][p_data['parent_uuid']]
            self._sync_project(p_data['parent_uuid'], c_data)
            self.updated_parents.append(p_data['parent_uuid'])

        project = Project.objects.filter(type=p_data['type'],
                                         sodar_uuid=uuid).first()
        parent = None
        action = 'create' if not project else 'update'

        logger.info('Processing {} "{}" ({})..'.format(p_data['type'].lower(),
                                                       p_data['title'], uuid))

        # Get parent and ensure it exists
        if p_data['parent_uuid']:
            try:
                parent = Project.objects.get(sodar_uuid=p_data['parent_uuid'])

            except Project.DoesNotExist:
                # Handle error
                error_msg = 'Parent {} not found'.format(p_data['parent_uuid'])
                self._handle_project_error(error_msg, uuid, p_data, action)
                return

        # Update project
        if project:
            self._update_project(project, p_data, parent)

        # Create new project
        else:
            self._create_project(uuid, p_data, parent)
            project = Project.objects.filter(type=p_data['type'],
                                             sodar_uuid=uuid).first()

        # Create/update a RemoteProject object
        try:
            remote_project = RemoteProject.objects.get(site=self.source_site,
                                                       project=project)
            remote_project.level = p_data['level']
            remote_project.project = project
            remote_project.date_access = timezone.now()
            remote_action = 'updated'

        except RemoteProject.DoesNotExist:
            remote_project = RemoteProject.objects.create(
                site=self.source_site,
                project_uuid=project.sodar_uuid,
                project=project,
                level=p_data['level'],
                date_access=timezone.now(),
            )
            remote_action = 'created'

        logger.debug('{} RemoteProject {}'.format(remote_action.capitalize(),
                                                  remote_project.sodar_uuid))

        # Skip the rest if not updating roles
        if 'level' in p_data and p_data['level'] != REMOTE_LEVEL_READ_ROLES:
            return

        # Create/update roles
        # NOTE: Only update AD/LDAP user roles and local owner roles
        self._update_roles(project, p_data)

        # Remove deleted user roles
        self._remove_deleted_roles(project, p_data)

    # API functions ------------------------------------------------------------

    def get_target_data(self, target_site):
        """
        Get user and project data to be synchronized into a target site.

        :param target_site: RemoteSite object for the target site
        :return: Dict
        """
        sync_data = {'users': {}, 'projects': {}}

        def _add_user(user):
            if user.username not in [
                    u['username'] for u in sync_data['users'].values()
            ]:
                sync_data['users'][str(user.sodar_uuid)] = {
                    'username': user.username,
                    'name': user.name,
                    'first_name': user.first_name,
                    'last_name': user.last_name,
                    'email': user.email,
                    'groups': [g.name for g in user.groups.all()],
                }

        def _add_parent_categories(category, project_level):
            if category.parent:
                _add_parent_categories(category.parent, project_level)

            if str(category.sodar_uuid) not in sync_data['projects'].keys():
                cat_data = {
                    'title': category.title,
                    'type': PROJECT_TYPE_CATEGORY,
                    'parent_uuid': (str(category.parent.sodar_uuid)
                                    if category.parent else None),
                    'description': category.description,
                    'readme': category.readme.raw,
                }

                if project_level == REMOTE_LEVEL_READ_ROLES:
                    cat_data['level'] = REMOTE_LEVEL_READ_ROLES
                    role_as = category.get_owner()
                    cat_data['roles'] = {}
                    cat_data['roles'][str(role_as.sodar_uuid)] = {
                        'user': role_as.user.username,
                        'role': role_as.role.name,
                    }
                    _add_user(role_as.user)

                else:
                    cat_data['level'] = REMOTE_LEVEL_READ_INFO

                sync_data['projects'][str(category.sodar_uuid)] = cat_data

        for rp in target_site.projects.all():
            project = rp.get_project()
            project_data = {
                'level': rp.level,
                'title': project.title,
                'type': PROJECT_TYPE_PROJECT,
            }

            # View available projects
            if rp.level == REMOTE_LEVEL_VIEW_AVAIL:
                project_data['available'] = True if project else False

            # Add info
            elif project and rp.level in [
                    REMOTE_LEVEL_READ_INFO,
                    REMOTE_LEVEL_READ_ROLES,
            ]:
                project_data['description'] = project.description
                project_data['readme'] = project.readme.raw

                # Add categories
                if project.parent:
                    _add_parent_categories(project.parent, rp.level)
                    project_data['parent_uuid'] = str(
                        project.parent.sodar_uuid)

            # If level is READ_ROLES, add categories and roles
            if rp.level == REMOTE_LEVEL_READ_ROLES:
                project_data['roles'] = {}

                for role_as in project.roles.all():
                    project_data['roles'][str(role_as.sodar_uuid)] = {
                        'user': role_as.user.username,
                        'role': role_as.role.name,
                    }
                    _add_user(role_as.user)

            sync_data['projects'][str(rp.project_uuid)] = project_data

        return sync_data

    def sync_source_data(self, site, remote_data, request=None):
        """
        Synchronize remote user and project data into the local Django database
        and return information of additions.

        :param site: RemoteSite object for the source site
        :param remote_data: Data returned by get_target_data() in the source
        :param request: Request object (optional)
        :return: Dict with updated remote_data
        :raise: ValueError if user from PROJECTROLES_DEFAULT_ADMIN is not found
        """
        self.source_site = site
        self.remote_data = remote_data
        self.updated_parents = []

        # Get default owner if remote projects have a local owner
        try:
            self.default_owner = User.objects.get(
                username=settings.PROJECTROLES_DEFAULT_ADMIN)

        except User.DoesNotExist:
            error_msg = (
                'Local user "{}" defined in PROJECTROLES_DEFAULT_ADMIN '
                'not found'.format(settings.PROJECTROLES_DEFAULT_ADMIN))
            logger.error(error_msg)
            raise ValueError(error_msg)

        # Set up timeline user
        if self.timeline:
            self.tl_user = request.user if request else self.default_owner

        logger.info('Synchronizing data from "{}"..'.format(site.name))

        # Return unchanged data if no projects with READ_ROLES are included
        if not {
                k: v
                for k, v in self.remote_data['projects'].items()
                if v['type'] == PROJECT_TYPE_PROJECT
                and v['level'] == REMOTE_LEVEL_READ_ROLES
        }.values():
            logger.info('No READ_ROLES access set, nothing to synchronize')
            return self.remote_data

        ########
        # Users
        ########
        logger.info('Synchronizing LDAP/AD users..')

        # NOTE: only sync LDAP/AD users
        for sodar_uuid, u_data in {
                k: v
                for k, v in self.remote_data['users'].items()
                if '@' in v['username']
        }.items():
            self._sync_user(sodar_uuid, u_data)

        logger.info('User sync OK')

        ##########################
        # Categories and Projects
        ##########################

        # Update projects
        logger.info('Synchronizing projects..')

        for sodar_uuid, p_data in {
                k: v
                for k, v in self.remote_data['projects'].items()
                if v['type'] == PROJECT_TYPE_PROJECT
                and v['level'] == REMOTE_LEVEL_READ_ROLES
        }.items():
            self._sync_project(sodar_uuid, p_data)

        logger.info('Synchronization OK')
        return self.remote_data
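
Note that Example #7 resolves the timeline backend both as a class attribute (evaluated when the module is imported) and again locally inside _remove_deleted_roles(). A small hedged sketch of the lazier alternative, with hypothetical class and attribute names, which defers the lookup until the plugin registry is guaranteed to be populated:

from projectroles.plugins import get_backend_api

class RemoteSyncBase:
    """Hypothetical base class resolving the timeline backend on first use."""

    _timeline = None

    @property
    def timeline(self):
        # Resolved lazily instead of at class-definition time
        if self._timeline is None:
            self._timeline = get_backend_api('timeline_backend')
        return self._timeline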
Example #8
class StructuralVariantFlagsApiView(
        LoginRequiredMixin,
        LoggedInPermissionMixin,
        ProjectPermissionMixin,
        ProjectContextMixin,
        SingleObjectMixin,
        SingleObjectTemplateResponseMixin,
        View,
):
    """A view that returns JSON for the ``StructuralVariantFlags`` for a variant of a case and allows updates."""

    # TODO: create new permission
    permission_required = "variants.view_data"
    model = Case
    slug_url_kwarg = "case"
    slug_field = "sodar_uuid"

    def _model_to_dict(self, flags):
        """Helper that calls ``model_to_dict()`` and then replaces the case PK with the SODAR UUID."""
        return {
            **model_to_dict(flags), "case": str(self.get_object().sodar_uuid)
        }

    def get(self, *_args, **kwargs):
        try:
            flags = self._get_flags_for_variant(kwargs["sv"])
        except StructuralVariantFlags.DoesNotExist:
            raise Http404("No flags for variant yet")
        else:
            return HttpResponse(
                json.dumps(self._model_to_dict(flags), cls=UUIDEncoder),
                content_type="application/json",
            )

    @transaction.atomic
    def post(self, *_args, **kwargs):
        case = self.get_object()
        sv = StructuralVariant.objects.get(sv_uuid=kwargs["sv"])
        try:
            flags = self._get_flags_for_variant(kwargs["sv"])
        except StructuralVariantFlags.DoesNotExist:
            flags = StructuralVariantFlags(
                case=case,
                bin=sv.bin,
                release=sv.release,
                chromosome=sv.chromosome,
                start=sv.start,
                end=sv.end,
                sv_type=sv.sv_type,
                sv_sub_type=sv.sv_sub_type,
            )
            flags.save()
        form = StructuralVariantFlagsForm(self.request.POST, instance=flags)
        try:
            flags = form.save()
        except ValueError as e:
            raise Exception(str(form.errors)) from e
        timeline = get_backend_api("timeline_backend")
        if timeline:
            tl_event = timeline.add_event(
                project=self.get_project(self.request, self.kwargs),
                app_name="svs",
                user=self.request.user,
                event_name="flags_set",
                description=
                "set flags for structural variant %s in case {case}: {extra-flag_values}"
                % sv,
                status_type="OK",
                extra_data={"flag_values": flags.human_readable()},
            )
            tl_event.add_object(obj=case, label="case", name=case.name)
        if flags.no_flags_set():
            flags.delete()
            result = {"message": "erased"}
        else:
            result = self._model_to_dict(flags)
        return HttpResponse(json.dumps(result, cls=UUIDEncoder),
                            content_type="application/json")
Example #9
    def setUp(self):
        super().setUp()
        self.timeline = get_backend_api('timeline_backend')
Example #10
def submit_distiller(job):
    """Submit a case to MutationDistiller"""
    job.mark_start()
    timeline = get_backend_api("timeline_backend")
    if timeline:
        tl_event = timeline.add_event(
            project=job.project,
            app_name="variants",
            user=job.bg_job.user,
            event_name="case_submit_distiller",
            description="submitting {case_name} case to MutationDistiller",
            status_type="INIT",
        )
        tl_event.add_object(obj=job.case,
                            label="case_name",
                            name=job.case.name)
    try:
        data = {
            "name": "%(name)s (sodar:%(project)s varfish:%(case)s)" % {
                "name": job.case.name,
                "project": job.project.sodar_uuid,
                "case": job.case.sodar_uuid,
            }
        }
        if job.bg_job.user.email:
            data["email"] = job.bg_job.user.email
        with CaseExporterVcf(job, job.case) as exporter:
            job.add_log_entry("Creating temporary VCF file...")
            files = {"filename": exporter.write_tmp_file()}
            job.add_log_entry("Done creating temporary VCF file.")
            job.add_log_entry("Submitting to MutationDistiller.org...")
            response = requests.post(DISTILLER_POST_URL,
                                     data=data,
                                     files=files)
            job.add_log_entry("Done submitting to MutationDistiller.org")
        if not response.ok:
            job.mark_error("HTTP status code: {}".format(response.status_code))
            if timeline:
                tl_event.set_status(
                    "FAILED",
                    "MutationDistiller submission failed for {case_name}")
            return  # bail out!
        # Get ID in MutationDistiller
        job.add_log_entry("Parsing MutationDistiller response...")
        distiller_id = None
        soup = BeautifulSoup(response.text, "html.parser")
        for meta in soup.find_all("meta"):
            if meta.attrs.get("http-equiv") == "refresh":
                url = meta.attrs.get("content").split("=")[-1]
                job.add_log_entry("URL = {}".format(url))
                m = re.match(r"/temp/QE/vcf_([^/]+)/progress.html", url)
                if m:
                    distiller_id = m.group(1)
                job.add_log_entry("Distiller ID = {}".format(distiller_id))
        if not distiller_id:
            job.mark_error("Could not find MutationDistiller ID from response")
            if timeline:
                tl_event.set_status(
                    "FAILED",
                    "Could not find MutationDistiller ID from response")
            return  # bail out!
        job.distiller_project_id = distiller_id
        job.save()
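
The snippet is truncated before the success path, but the response-parsing step is self-contained. A standalone sketch of extracting the MutationDistiller ID from the meta-refresh redirect, using only the calls shown above (the function name is illustrative):

import re
from bs4 import BeautifulSoup

def parse_distiller_id(html_text):
    """Return the ID embedded in the meta-refresh URL, or None if absent."""
    soup = BeautifulSoup(html_text, "html.parser")
    for meta in soup.find_all("meta"):
        if meta.attrs.get("http-equiv") == "refresh":
            # the redirect URL follows the last '=' in the content attribute
            url = meta.attrs.get("content").split("=")[-1]
            m = re.match(r"/temp/QE/vcf_([^/]+)/progress.html", url)
            if m:
                return m.group(1)
    return None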
Example #11
    def handle(self, *args, **options):
        if 'taskflow' not in settings.ENABLED_BACKEND_PLUGINS:
            print_msg('Taskflow not enabled in settings, cancelled!')
            raise CommandError

        taskflow = get_backend_api('taskflow')

        if not taskflow:
            print_msg('Taskflow backend plugin not available, cancelled!')
            raise CommandError

        def submit_sync(app_name, sync_data, raise_exception=False):
            """Submit flows found in an app's sync_data structure"""
            for item in sync_data:
                project = Project.objects.get(sodar_uuid=item['project_uuid'])

                print_msg('Syncing flow "{}" by {} for "{}" ({})'.format(
                    item['flow_name'],
                    app_name,
                    project.title,
                    project.sodar_uuid,
                ))

                try:
                    taskflow.submit(
                        project_uuid=item['project_uuid'],
                        flow_name=item['flow_name'],
                        flow_data=item['flow_data'],
                        targets=TARGETS,
                    )

                except taskflow.FlowSubmitException as ex:
                    print_msg('Exception raised by flow!')
                    print(str(ex))

                    # If we don't want to continue on failure
                    if raise_exception:
                        raise ex

        print_msg('Synchronizing project data with taskflow...')
        print_msg('Target(s) = ' + ', '.join([t for t in TARGETS]))

        # Only sync PROJECT type projects as we (currently) don't have any
        # use for CATEGORY projects in taskflow
        projects = Project.objects.filter(
            type=PROJECT_TYPE_PROJECT).order_by('pk')

        ####################
        # Projectroles sync
        ####################

        # NOTE: For projectroles, this is done here as projects must be created
        #       or we cannot continue with the sync. Also, removed projects are
        #       NOT deleted automatically (they shouldn't be deleted anyway).
        #       We first set up the projects and exit if syncing them fails.

        project_sync_data = []
        role_sync_data = []

        for project in projects:
            owner_as = project.get_owner()

            # Create project
            project_sync_data.append({
                'project_uuid': str(project.sodar_uuid),
                'project_title': project.title,
                'flow_name': 'project_create',
                'flow_data': {
                    'project_title': project.title,
                    'project_description': project.description,
                    'parent_uuid': (str(project.parent.sodar_uuid)
                                    if project.parent else 0),
                    'owner_username': owner_as.user.username,
                    'owner_uuid': str(owner_as.user.sodar_uuid),
                    'owner_role_pk': owner_as.role.pk,
                },
            })

            # Set up roles
            role_sync_data.append({
                'project_uuid': str(project.sodar_uuid),
                'project_title': project.title,
                'flow_name': 'role_sync_delete_all',
                'flow_data': {
                    'owner_username': owner_as.user.username
                },
            })

            for role_as in project.roles.exclude(role=Role.objects.get(
                    name=SODAR_CONSTANTS['PROJECT_ROLE_OWNER'])):
                role_sync_data.append({
                    'project_uuid': str(project.sodar_uuid),
                    'project_title': project.title,
                    'flow_name': 'role_update',
                    'flow_data': {
                        'username': role_as.user.username,
                        'user_uuid': str(role_as.user.sodar_uuid),
                        'role_pk': str(role_as.role.pk),
                    },
                })

        try:
            submit_sync('projectroles',
                        project_sync_data,
                        raise_exception=True)

        # In case of a failure here we can't continue with the rest of the sync
        except taskflow.FlowSubmitException:
            print_msg('Project creation failed! Unable to continue, exiting..')
            return

        submit_sync('projectroles', role_sync_data, raise_exception=False)

        ###########
        # App sync
        ###########

        plugins = get_active_plugins(plugin_type='project_app')

        for plugin in plugins:
            sync_data = plugin.get_taskflow_sync_data()
            print_msg('Synchronizing app "{}"...'.format(plugin.name))

            if sync_data:
                submit_sync(plugin.name, sync_data, raise_exception=False)

            else:
                print_msg('Nothing to synchronize.')

        print_msg('Project data synchronized.')
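
A condensed sketch of the guard-and-submit pattern used above, for submitting a single flow outside the management command (the wrapper function and its parameters are hypothetical; only get_backend_api('taskflow'), taskflow.submit() and taskflow.FlowSubmitException come from the example):

from django.core.management.base import CommandError
from projectroles.plugins import get_backend_api

def submit_single_flow(project_uuid, flow_name, flow_data, targets):
    """Hypothetical wrapper: submit one taskflow flow or raise CommandError."""
    taskflow = get_backend_api('taskflow')
    if not taskflow:
        raise CommandError('Taskflow backend plugin not available')
    try:
        taskflow.submit(
            project_uuid=project_uuid,
            flow_name=flow_name,
            flow_data=flow_data,
            targets=targets,
        )
    except taskflow.FlowSubmitException as ex:
        # Callers decide whether a failed flow should abort the whole sync
        raise CommandError('Flow "{}" failed: {}'.format(flow_name, ex))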