Example #1
0
    def as_task(
        self,
        fun: t.Callable[[], t.Optional[TaskResultState]],
        *,
        eta: t.Optional[DatetimeWithTimezone] = None,
    ) -> bool:
        """Execute ``fun`` as this task result's task.

        .. warning::

            The current session is committed almost immediately by this
            method, however after ``fun`` has run nothing is committed.

        :param fun: The function executed as the task; exceptions it raises
            are caught and stored on this task result.
        :param eta: Earliest moment the task may run. If the current time is
            before this value the current celery task is scheduled again and
            ``fun`` is not invoked.

        :returns: ``True`` if the task ran, otherwise ``False``.
        """
        # Guard: a task result may only be started once.
        if not self.state.is_not_started:  # pragma: no cover
            logger.error('Cannot start task that has already started',
                         task_result=self)
            return False
        if eta and current_task.maybe_delay_task(eta):
            # Too early: the celery task was rescheduled for later.
            return False

        # Persist the state change before running, so other transactions can
        # observe that this task is in progress.
        self.state = TaskResultState.started
        db.session.commit()

        try:
            outcome = fun()
        except APIException as exc:
            # Known API errors mark the task as failed and store the error.
            self.state = TaskResultState.failed
            self.result = JSONResponse.dump_to_object(exc)
        except:  # pylint: disable=bare-except
            # Anything else is an unexpected crash; store a generic error.
            logger.warning('The task crashed', exc_info=True)
            self.state = TaskResultState.crashed
            self.result = JSONResponse.dump_to_object(
                APIException(
                    'The task failed for an unknown reason',
                    f'The task {self.id} failed with a uncaught exception',
                    APICodes.UNKOWN_ERROR, 400))
        else:
            # Success: ``fun`` may return an explicit final state, otherwise
            # the task is simply finished.
            self.result = None
            self.state = handle_none(outcome, TaskResultState.finished)

        return True
Example #2
0
def login_with_link(
    login_link_id: uuid.UUID
) -> MultipleExtendedJSONResponse[models.User.LoginResponse, models.User]:
    """Login with the given login link.

    .. :quickref: Login link; Login with a login link.

    This will only work when the assignment connected to this link is
    available, and the deadline has not expired. The received JWT token will
    only be valid until the 30 minutes after the deadline, and only in the
    course connected to this link.

    .. note::

        The scope of the returned token will change in the future, this will
        not be considered a breaking change.

    :param login_link_id: The id of the login link you want to use to login.

    :returns: The logged in user and an access token.

    :raises APIException: When the assignment is not yet available or its
        deadline has already expired.
    """
    # Treat links whose assignment is invisible, or does not send login
    # links, as if they do not exist at all (404).
    login_link = helpers.get_or_404(
        models.AssignmentLoginLink,
        login_link_id,
        also_error=lambda l:
        (not l.assignment.is_visible or not l.assignment.send_login_links))
    assignment = login_link.assignment
    # NOTE(review): the deadline is captured before the possible reload
    # below; presumably opening the assignment cannot change it — confirm.
    deadline = assignment.deadline

    if assignment.state.is_hidden:
        assignment_id = assignment.id
        db.session.expire(assignment)

        # Re-fetch the assignment under a row lock so concurrent logins do
        # not race while deciding whether to open it.
        assignment = models.Assignment.query.filter(
            models.Assignment.id == assignment_id).with_for_update().one()
        logger.info(
            'Assignment is still hidden, checking if we have to open it',
            assignment_id=assignment_id,
            state=assignment.state,
            available_at=assignment.available_at,
        )

        # We reload the assignment from the database, so we have to check again
        # if it really still is hidden.
        if assignment.state.is_hidden:
            now = helpers.get_request_start_time()
            if (assignment.available_at is not None
                    and now >= assignment.available_at):
                # The availability moment has passed: open the assignment and
                # commit immediately (also releasing the row lock).
                assignment.state = models.AssignmentStateEnum.open
                db.session.commit()
            else:
                # Tell the user how long to wait; when there is no known
                # availability moment at all the wait is "infinite".
                time_left = cg_helpers.handle_none(
                    cg_helpers.on_not_none(
                        assignment.available_at,
                        lambda avail: humanize.timedelta(
                            avail - now,
                            no_prefix=True,
                        ),
                    ), 'an infinite amount')
                raise APIException(
                    ('The assignment connected to this login link is still'
                     ' not available, please wait for {} for it to become'
                     ' available.').format(time_left),
                    f'The assignment {assignment.id} is not available yet',
                    APICodes.INVALID_STATE, 409)

    if deadline is None or assignment.deadline_expired:
        raise APIException(
            ('The deadline for this assignment has already expired, so you'
             ' can no longer use this link.'),
            f'The deadline for the assignment {assignment.id} has expired',
            APICodes.OBJECT_EXPIRED, 400)

    logger.info('Logging in user with login link',
                user_to_login=login_link.user)

    auth.set_current_user(login_link.user)

    # Sanity check that the now logged-in user may actually see the
    # assignment, and include their permissions in the response.
    auth.AssignmentPermissions(assignment).ensure_may_see()
    jsonify_options.get_options().add_permissions_to_user = login_link.user

    return MultipleExtendedJSONResponse.make(
        {
            'user':
            login_link.user,
            'access_token':
            # Token is only valid in this assignment's course and only until
            # 30 minutes after the deadline.
            login_link.user.make_access_token(
                expires_at=deadline + timedelta(minutes=30),
                for_course=assignment.course,
            ),
        },
        use_extended=models.User,
    )
Example #3
0
def process_blackboard_zip(
    blackboard_zip: FileStorage,
    max_size: FileSize,
) -> t.MutableSequence[t.Tuple[blackboard.SubmissionInfo, ExtractFileTree]]:
    """Process the given :py:mod:`.blackboard` zip file.

    This is done by extracting, moving and saving the tree structure of each
    submission.

    :param blackboard_zip: The blackboard gradebook to import.
    :param max_size: The maximum size a single submission may be.

    :returns: List of tuples (BBInfo, tree).

    :raises ValueError: If the gradebook did not contain any submissions.
    """
    def __get_files(info: blackboard.SubmissionInfo) -> t.List[FileStorage]:
        """Collect the files of one submission as ``FileStorage`` objects."""
        files = []
        for blackboard_file in info.files:
            if isinstance(blackboard_file, blackboard.FileInfo):
                name = blackboard_file.original_name
                bb_file = bb_tree.lookup_direct_child(blackboard_file.name)
                if not isinstance(bb_file, ExtractFileTreeFile):
                    raise AssertionError(
                        'File {} was not a file but instead was {}'.format(
                            blackboard_file.name, bb_file))
                stream = bb_file.backing_file.open()
            else:
                # Inline content: a ``(name, bytes)`` pair.
                name = blackboard_file[0]
                stream = io.BytesIO(blackboard_file[1])

            # ``__WARNING__`` is reserved for warnings we generate below, so
            # rename user files that happen to use that exact name.
            if name == '__WARNING__':
                name = '__WARNING__ (User)'

            files.append(FileStorage(stream=stream, filename=name))
        return files

    with app.file_storage.putter() as putter:
        bb_tree = extract(
            blackboard_zip,
            cg_helpers.handle_none(blackboard_zip.filename, 'bb_zip'),
            max_size=app.max_large_file_size,
            putter=putter,
        )
        # Each submission in the gradebook is described by an info txt file.
        info_files = (f for f in bb_tree.values
                      if _BB_TXT_FORMAT.match(f.name))
        submissions = []
        for info_file in info_files:
            assert isinstance(info_file, ExtractFileTreeFile)
            with info_file.backing_file.open() as info_fileobj:
                info = blackboard.parse_info_file(info_fileobj)

            try:
                tree = process_files(
                    files=__get_files(info),
                    max_size=max_size,
                    putter=putter,
                )
            # TODO: We catch all exceptions, this should probably be
            # narrowed down, however finding all exception types is
            # difficult.
            except Exception:  # pylint: disable=broad-except
                logger.info('Could not extract files', exc_info=True)
                # Retry with the files treated as plain text, adding a
                # warning file so the grader can see extraction failed.
                files = __get_files(info)
                files.append(
                    FileStorage(stream=io.BytesIO(
                        b'Some files could not be extracted!'),
                                filename='__WARNING__'))
                tree = process_files(files=files,
                                     max_size=max_size,
                                     force_txt=True,
                                     putter=putter)

            submissions.append((info, tree))

        if not submissions:
            raise ValueError('No submissions found in the given gradebook')

    return submissions
Example #4
0
def _process_files(
    files: t.Sequence[FileStorage],
    max_size: FileSize,
    force_txt: bool,
    ignore_filter: t.Optional[SubmissionFilter],
    handle_ignore: IgnoreHandling,
    putter: cg_object_storage.Putter,
) -> ExtractFileTree:
    """Build an :class:`ExtractFileTree` from the given uploaded files.

    A single archive upload is extracted; any other upload becomes a flat
    tree of the given files, with duplicate filenames made unique. The
    resulting tree is then run through the submission ignore filter and
    checked against ``max_size``.

    :param files: The uploaded files; assumed to be non-empty and to have
        non-``None`` filenames.
    :param max_size: The maximum total size of the resulting tree.
    :param force_txt: Never treat any of the files as an archive.
    :param ignore_filter: The filter deciding which files are ignored;
        ``None`` means an empty filter.
    :param handle_ignore: What to do with files matched by the filter.
    :param putter: The storage used to persist file contents.
    :returns: The processed, filtered file tree.
    :raises APIException: When overriding the filter is not allowed, when no
        files remain (before or after filtering), or when the tree exceeds
        ``max_size``.
    :raises IgnoredFilesException: When files are ignored or missing and
        ``handle_ignore`` requires that to be an error.
    """
    if ignore_filter is None:
        ignore_filter = EmptySubmissionFilter()

    # Some filters may not be overridden: keeping ignored files is then an
    # invalid request.
    if (handle_ignore == IgnoreHandling.keep
            and not ignore_filter.can_override_ignore_filter):
        raise APIException(
            'Overriding the ignore filter is not possible for this assignment',
            'The filter disallows overriding it', APICodes.INVALID_PARAM, 400)

    def consider_archive(f: FileStorage) -> bool:
        """Should ``f`` be extracted as an archive?"""
        assert f.filename is not None
        return not force_txt and archive.Archive.is_archive(f.filename)

    if len(files) > 1 or not consider_archive(files[0]):
        tree = ExtractFileTree(name='top')
        filename_counter: t.Counter[str] = Counter()
        # We reverse sort on length so that in the case we have two files
        # named `a` and one named `a (1)` the resulting file `a (1)` will be
        # the original `a (1)`.
        for file in sorted(
                files,
                key=lambda f: len(f.filename or ''),
                reverse=True,
        ):
            assert file.filename is not None
            filename = file.filename

            # Deduplicate: append ` (1)`, ` (2)`, ... to the part before the
            # first dot until the name is unused.
            idx = 0
            while filename_counter[filename] > 0:
                idx += 1
                parts = file.filename.split('.')
                parts[0] += f' ({idx})'
                filename = '.'.join(parts)
            filename_counter[filename] += 1

            if consider_archive(file):
                tree.add_child(
                    extract(file, filename, max_size=max_size, putter=putter))
            else:
                saved_file = putter.from_stream(
                    file.stream, max_size=app.max_single_file_size)
                if saved_file.is_nothing:
                    raise helpers.make_file_too_big_exception(
                        app.max_single_file_size, single_file=True)
                tree.add_child(
                    ExtractFileTreeFile(name=filename,
                                        backing_file=saved_file.value))

    else:
        # Exactly one file and it is an archive: extract it as the tree.
        tree = extract(
            files[0],
            cg_helpers.handle_none(files[0].filename, 'archive'),
            max_size=max_size,
            putter=putter,
        )

    if not tree.contains_file:
        raise APIException(
            'No files found in archive',
            'No files were in the given archive.',
            APICodes.NO_FILES_SUBMITTED,
            400,
        )

    # Keep a copy so the error below can show what the tree looked like
    # before the filter removed anything.
    original_tree = copy.deepcopy(tree)
    tree, total_changes, missing_files = ignore_filter.process_submission(
        tree, handle_ignore)
    # Removing only a leading directory does not count as a real change.
    actual_file_changes = any(c.deletion_type != DeletionType.leading_directory
                              for c in total_changes)
    if missing_files or (handle_ignore == IgnoreHandling.error
                         and actual_file_changes):
        raise IgnoredFilesException(
            total_changes,
            ignore_filter.CGIGNORE_VERSION,
            original_tree=original_tree,
            missing_files=missing_files,
        )

    logger.info('Removing files', removed_files=total_changes)

    # It did contain files before deleting, so the deletion caused the tree to
    # be empty.
    if not tree.contains_file:
        raise APIException(
            ("All files are ignored by a rule in the assignment's"
             ' ignore file'),
            'No files were in the given archive after filtering.',
            APICodes.NO_FILES_SUBMITTED,
            400,
        )

    tree_size = tree.get_size()
    logger.info('Total size', total_size=tree_size, size=max_size)
    if tree_size > max_size:
        # Too big: clean up the stored files before reporting the error.
        tree.delete()
        raise helpers.make_file_too_big_exception(max_size, single_file=False)

    return tree
Example #5
0
def test_with_not_none():
    """``handle_none`` must pass non-``None`` values through untouched."""
    assert handle_none(5, 6) == 5
    first, second = object(), object()
    assert handle_none(first, second) is first
Example #6
0
def test_with_none():
    """``handle_none`` must fall back to the default when given ``None``."""
    assert handle_none(None, 5) == 5
    fallback = object()
    assert handle_none(None, fallback) is fallback
Example #7
0
def test_with_both_none():
    """A ``None`` default is simply returned for a ``None`` value."""
    assert handle_none(None, None) is None