Example 1
            def maybe_delay_task(self,
                                 wanted_time: DatetimeWithTimezone) -> bool:
                """Maybe delay this task.

                This function reschedules the current task with the same
                arguments if the current time is before the given wanted time.

                :param wanted_time: The earliest time this task may be
                    executed.
                :returns: ``True`` if the task was rescheduled; in that case
                    you should stop running the current task.
                """
                now = DatetimeWithTimezone.utcnow()

                logger.info(
                    'Checking if should delay the task',
                    wanted_time=wanted_time.isoformat(),
                    current_time=now.isoformat(),
                    should_delay=now < wanted_time,
                )

                if now >= wanted_time:
                    return False

                logger.info(
                    'Delaying task',
                    wanted_time=wanted_time.isoformat(),
                    current_time=now.isoformat(),
                )
                self.apply_async(args=self.request.args,
                                 kwargs=self.request.kwargs,
                                 eta=wanted_time)
                return True
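
Examples 11 and 30 further down show the calling side of this helper. A minimal sketch of that pattern follows; the task name and its argument are hypothetical, only the ``current_task.maybe_delay_task(...)`` call mirrors the real usage.

# Sketch only: ``_do_periodic_cleanup_1`` and ``run_at`` are made-up names,
# but the call mirrors examples 11 and 30 below.
def _do_periodic_cleanup_1(run_at: str) -> None:
    if current_task.maybe_delay_task(
        DatetimeWithTimezone.fromisoformat(run_at)
    ):
        # The task rescheduled itself with the same arguments; stop here.
        return
    ...  # the real work only runs once ``run_at`` has passed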
Example 2
def _get_age_datetime(date: t.Union[DatetimeWithTimezone, str],
                      add_m: bool = False) -> t.Union[float, str]:
    if isinstance(date, str):
        date = DatetimeWithTimezone.fromisoformat(date)
    res = int(
        round((DatetimeWithTimezone.utcnow() - date).total_seconds() / 60))
    if add_m:
        return f'{res}m'
    return res
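
For illustration, both call styles side by side; the exact numbers depend on the current time, and the ``timedelta`` arithmetic is the same as in the other snippets here.

# Sketch: roughly five minutes have passed since ``five_minutes_ago``.
five_minutes_ago = DatetimeWithTimezone.utcnow() - timedelta(minutes=5)
_get_age_datetime(five_minutes_ago)                     # -> 5
_get_age_datetime(five_minutes_ago.isoformat(), True)   # -> '5m'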
Example 3
    def _generate_keys(self) -> None:
        key = rsa.generate_private_key(
            public_exponent=65537, key_size=2048, backend=default_backend()
        )
        now = DatetimeWithTimezone.utcnow()
        subject = issuer = x509.Name(
            [
                # Provide various details about who we are.
                x509.NameAttribute(
                    NameOID.ORGANIZATION_NAME,
                    'CodeGrade',
                ),
                x509.NameAttribute(
                    NameOID.COMMON_NAME,
                    current_app.config['EXTERNAL_DOMAIN'],
                ),
            ]
        )
        cert = x509.CertificateBuilder().subject_name(
            subject,
        ).issuer_name(
            issuer,
        ).public_key(
            key.public_key(),
        ).serial_number(
            x509.random_serial_number(),
        ).not_valid_before(
            DatetimeWithTimezone.as_datetime(now),
        ).not_valid_after(
            # TODO: We need to find a way to rotate these certificates. This
            # will however only become a problem in a few years. Should be
            # fixed **before** 2025.
            DatetimeWithTimezone.as_datetime(now + timedelta(days=365 * 5)),
        ).add_extension(
            x509.SubjectAlternativeName(
                [
                    # Describe what sites we want this certificate for.
                    x509.DNSName(current_app.config['EXTERNAL_DOMAIN']),
                ]
            ),
            critical=False,
            # Sign the CSR with our private key.
        ).sign(key, hashes.SHA256(), default_backend())

        self._cert_data = cert.public_bytes(serialization.Encoding.PEM)
        self._key_data = key.private_bytes(
            encoding=serialization.Encoding.PEM,
            format=serialization.PrivateFormat.PKCS8,
            encryption_algorithm=serialization.NoEncryption()
        )
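
The PEM blobs stored in ``self._cert_data`` and ``self._key_data`` can be loaded back with the same ``cryptography`` primitives if you need to inspect them; a small sketch, where ``cert_data`` and ``key_data`` stand in for those attributes:

from cryptography import x509
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives import serialization

# Sketch: round-trip the PEM data produced above and look at the five-year
# validity window that was set via ``not_valid_after``.
cert = x509.load_pem_x509_certificate(cert_data, default_backend())
key = serialization.load_pem_private_key(
    key_data, password=None, backend=default_backend()
)
print(cert.not_valid_before, cert.not_valid_after)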
Example 4
def _send_delayed_notification_emails(
    digest_type: Literal[p.models.EmailNotificationTypes.daily,
                         p.models.EmailNotificationTypes.weekly]
) -> None:
    now = DatetimeWithTimezone.utcnow()
    if digest_type == p.models.EmailNotificationTypes.daily:
        max_age = now - datetime.timedelta(days=1, hours=2)
    else:
        assert digest_type == p.models.EmailNotificationTypes.weekly
        max_age = now - datetime.timedelta(days=7, hours=2)

    notifications = p.models.db.session.query(p.models.Notification).filter(
        p.models.Notification.email_sent_at.is_(None),
        p.models.Notification.created_at > max_age,
    ).order_by(p.models.Notification.receiver_id).with_for_update().all()

    should_send = p.models.NotificationsSetting.get_should_send_for_users(
        list(set(n.receiver_id for n in notifications))
    )

    notifications_to_send = []

    now = DatetimeWithTimezone.utcnow()
    for notification in notifications:
        with cg_logger.bound_to_logger(
            notification=notification.__structlog__()
        ):
            if not should_send(notification, digest_type):
                logger.info('Should not send notification')
                continue
            logger.info('Should send notification')
            notification.email_sent_at = now
            notifications_to_send.append(notification)
    p.models.db.session.commit()

    for user, user_notifications in itertools.groupby(
        notifications_to_send, lambda n: n.receiver
    ):
        try:
            p.mail.send_digest_notification_email(
                list(user_notifications), digest_type
            )
        # pylint: disable=broad-except
        except Exception:  # pragma: no cover
            logger.warning(
                'Could not send digest email',
                receiving_user_id=user.id,
                exc_info=True,
                report_to_sentry=True,
            )
Example 5
    def make_unassigned(self) -> None:
        """Make this runner unassigned.

        .. note::

            This also starts a job to kill this runner after a certain amount
            of time if it is still unassigned.
        """
        runner_hex_id = self.id.hex
        self.job_id = None
        if self.state in RunnerState.get_before_running_states():
            if self.state.is_assigned:
                self.state = RunnerState.started
            eta = DatetimeWithTimezone.utcnow() + timedelta(
                minutes=app.config['RUNNER_MAX_TIME_ALIVE']
            )

            callback_after_this_request(
                lambda: cg_broker.tasks.maybe_kill_unneeded_runner.apply_async(
                    (runner_hex_id, ),
                    eta=eta,
                )
            )
        else:
            self.state = RunnerState.cleaning
            callback_after_this_request(
                lambda: cg_broker.tasks.kill_runner.delay(runner_hex_id)
            )
Example 6
def _send_direct_notification_emails_1(
    notification_ids: t.List[int],
) -> None:
    notifications = p.models.db.session.query(p.models.Notification).filter(
        p.models.Notification.id.in_(notification_ids),
        p.models.Notification.email_sent_at.is_(None),
    ).with_for_update().all()

    should_send = p.models.NotificationsSetting.get_should_send_for_users(
        [n.receiver_id for n in notifications])

    for notification in notifications:
        with cg_logger.bound_to_logger(notification=notification):
            if not should_send(notification,
                               p.models.EmailNotificationTypes.direct):
                logger.info('Should not send notification')
                continue

            now = DatetimeWithTimezone.utcnow()
            try:
                p.mail.send_direct_notification_email(notification)
            # pylint: disable=broad-except
            except Exception:  # pragma: no cover
                # This happens if mail sending fails or if the user has no
                # e-mail address.
                # TODO: make this exception more specific
                logger.warning(
                    'Could not send notification email',
                    receiving_user_id=notification.receiver_id,
                    exc_info=True,
                    report_to_sentry=True,
                )
            else:
                notification.email_sent_at = now

    p.models.db.session.commit()
Example 7
def test_git_submission_generated_files(basic, test_client, logged_in,
                                        describe, session, app, monkeypatch,
                                        private_key):
    with describe('setup'):
        course, assig, teacher, student = basic
        with logged_in(student):
            webhook_id = test_client.req(
                'post',
                (f'/api/v1/assignments/{assig.id}/webhook_settings?webhook_'
                 'type=git'), 200)['id']
            webhook = m.WebhookBase.query.get(webhook_id)

            now = DatetimeWithTimezone.utcnow()
            stamp = now.timestamp()

            p.site_settings.Opt.MAX_NORMAL_UPLOAD_SIZE.set_and_commit_value(
                '90kb')
            clone_data = {
                'type': 'github',
                'url': 'MY_URL',
                'commit': 'bda573b5553e6938e4bb1e78a281939d4581e2fe',
                'ref': 'refs/heads/not_master',
                'sender_username': '******',
                'sender_name': 'MY_NAME',
                'webhook_id': str(webhook.id),
                'clone_url': _GITHUB_CLONE_URL,
                'repository_name': 'MY_REPO',
                'event': 'push',
                'branch': 'master',
                'default_branch': 'master',
            }

            webhook._ssh_key = private_key
            session.commit()

        p.tasks._clone_commit_as_submission_1(unix_timestamp=stamp,
                                              clone_data_as_dict=clone_data)
        sub = next(sub for sub in assig.get_all_latest_submissions()
                   if sub.user == student)

    with tempfile.TemporaryDirectory() as tmpdir:
        m.File.restore_directory_structure(
            tmpdir,
            m.File.make_cache(sub),
        )
        root = f'{tmpdir}/{os.listdir(tmpdir)[0]}'

        with describe('cg-size-limit-exceeded'), open(
                f'{root}/cg-size-limit-exceeded') as f:
            content = f.read()
            assert content.endswith('\n')
            assert 'limit was exceeded' in content

        with describe('symbolic link replacement files'), open(
                f'{root}/aaa.pdf.link') as f:
            assert not os.path.islink(f'{root}/aaa.pdf.link')
            content = f.read()
            assert content.endswith('\n')
            assert 'symbolic link' in content
            assert 'test.pdf' in content
Example 8
def assignment(course_name, state_is_hidden, session, request, with_works):
    course = m.Course.query.filter_by(name=course_name).one()
    state = (
        m.AssignmentStateEnum.hidden
        if state_is_hidden else m.AssignmentStateEnum.open
    )
    assig = m.Assignment(
        name='TEST COURSE',
        state=state,
        course=course,
        deadline=DatetimeWithTimezone.utcnow() +
        datetime.timedelta(days=1 if request.param == 'new' else -1),
        is_lti=False,
    )
    session.add(assig)
    session.commit()

    if with_works:
        names = ['Student1', 'Student2', 'Student3', 'Œlµo']
        if with_works != 'single':
            names += names
        for uname in names:
            user = m.User.query.filter_by(name=uname).one()
            work = m.Work(assignment=assig, user=user)
            session.add(work)
        session.commit()

    yield assig
Example 9
    def clear_cookie(self, name: str) -> None:
        self._cookie_data_to_set.append(
            FlaskCookieService._CookieData(
                key=self._get_key(name),
                value='',
                exp=DatetimeWithTimezone.utcfromtimestamp(0),
            ))
Example 10
    def set_g_vars() -> None:
        g.request_id = self.request.id
        g.queries_amount = 0
        g.queries_total_duration = 0
        g.queries_max_duration = None
        g.query_start = None
        g.request_start_time = DatetimeWithTimezone.utcnow()
Example 11
def _delete_mirror_file_at_time_1(name: str, deletion_time: str) -> None:
    if current_task.maybe_delay_task(
            DatetimeWithTimezone.fromisoformat(deletion_time)):
        return

    found = p.app.mirror_file_storage.get(name)
    found.if_just(lambda f: f.delete())
Example 12
def _make_blob_and_redirect(
    params: t.Mapping[str, object],
    version: LTIVersion,
    goto_latest_submission: bool,
) -> werkzeug.wrappers.Response:
    data = {
        'params': {
            'data': params,
            'version': version.__to_json__(),
        },
        'exp': DatetimeWithTimezone.utcnow() + timedelta(minutes=5)
    }
    blob = models.BlobStorage(data=jwt.encode(
        data,
        app.config['LTI_SECRET_KEY'],
        algorithm='HS512',
    ))
    db.session.add(blob)
    db.session.commit()

    return flask.redirect(
        ('{host}/lti_launch/?inLTI=true&blob_id={blob_id}'
         '&redirect={redirect}&goto_latest_submission={goto_latest}').format(
             host=app.config['EXTERNAL_URL'],
             blob_id=blob.id,
             redirect=urllib.parse.quote(
                 flask.request.args.get('codegrade_redirect', ''), ),
             goto_latest=goto_latest_submission,
         ),
        code=303,
    )
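
For completeness, the blob created here can later be decoded again with the same shared secret. A sketch in PyJWT 2.x style, assuming the encoded token is what ``blob.data`` holds (that attribute name is inferred from the constructor call above):

# Sketch: decode the JWT stored in the blob. PyJWT validates the ``exp``
# claim, so tokens older than the five-minute window raise
# ``jwt.ExpiredSignatureError``.
payload = jwt.decode(
    blob.data,
    app.config['LTI_SECRET_KEY'],
    algorithms=['HS512'],
)
launch_params = payload['params']['data']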
Example 13
def parse_datetime(  # pylint: disable=function-redefined
    to_parse: object,
    allow_none: bool = False,
) -> t.Optional[DatetimeWithTimezone]:
    """Parse a datetime string using dateutil.

    :param to_parse: The object to parse, if this is not a string the parsing
        will always fail.
    :param allow_none: Allow ``None`` to be passed without raising an
        exception. If ``to_parse`` is ``None`` and this option is ``True`` the
        result will be ``None``.
    :returns: The parsed DatetimeWithTimezone object.
    :raises APIException: If the parsing fails for whatever reason.
    """
    if to_parse is None and allow_none:
        return None

    if isinstance(to_parse, str):
        try:
            parsed = dateutil.parser.parse(to_parse)
        except (ValueError, OverflowError):
            pass
        else:
            # This assumes that datetimes without tzinfo are in UTC. That is
            # not correct according to the ISO spec; however, it is what we
            # used to do, so we keep doing it for backwards compatibility.
            return DatetimeWithTimezone.from_datetime(parsed,
                                                      default_tz=timezone.utc)

    raise APIException('The given date is not valid!',
                       '{} cannot be parsed by dateutil.'.format(to_parse),
                       APICodes.INVALID_PARAM, 400)
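
Two quick illustrations of the behaviour described in the docstring (the results in the comments are approximate):

# A timezone-less ISO string is interpreted as UTC (see the comment above).
parse_datetime('2021-03-01T12:00:00')      # -> 2021-03-01 12:00:00 UTC
parse_datetime(None, allow_none=True)      # -> None
parse_datetime('not a date')               # raises APIException (400)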
Example 14
def create_assignment(test_client,
                      course_id=None,
                      state='hidden',
                      deadline=None):
    name = f'__NEW_ASSIGNMENT__-{uuid.uuid4()}'
    if course_id is None:
        course_id = create_course(test_client)

    if deadline == 'tomorrow':
        deadline = DatetimeWithTimezone.utcnow() + datetime.timedelta(days=1)

    res = test_client.req(
        'post',
        f'/api/v1/courses/{get_id(course_id)}/assignments/',
        200,
        data={'name': name},
        result={
            'name': name,
            '__allow_extra__': True
        },
    )
    data = {}
    if state != 'hidden':
        data['state'] = state
    if deadline is not None:
        if isinstance(deadline, datetime.datetime):
            deadline = deadline.isoformat()
        data['deadline'] = deadline
    res = test_client.req('patch',
                          f'/api/v1/assignments/{res["id"]}',
                          200,
                          data=data)
    return res
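
A typical call site in a test could look roughly like this; ``create_course`` is the helper already referenced inside the function above, and the fixtures are assumed to be the usual ``test_client`` used throughout these examples.

# Sketch: the first call creates its own course (``course_id=None``), the
# second one reuses an explicitly created course and stays hidden.
open_assig = create_assignment(test_client, state='open', deadline='tomorrow')
hidden_assig = create_assignment(
    test_client, course_id=create_course(test_client)
)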
Example 15
    def copy(self) -> 'RubricRowBase':
        return RubricRowBase(
            created_at=DatetimeWithTimezone.utcnow(),
            description=self.description,
            header=self.header,
            assignment_id=self.assignment_id,
            items=[item.copy() for item in self.items],
            rubric_row_type=self.rubric_row_type,
        )
Example 16
    def state(self, new_state: AutoTestStepResultState) -> None:
        if self._state == new_state:
            return

        self._state = new_state
        if new_state == AutoTestStepResultState.running:
            self.started_at = DatetimeWithTimezone.utcnow()
        else:
            self.started_at = None
Example 17
def about() -> cg_json.JSONResponse[t.Mapping[str, object]]:
    """Get some information about the state of this broker.

    When given a valid ``health`` GET parameter, this will also return some
    health information.
    """
    if request.args.get('health', object()) == app.config['HEALTH_KEY']:
        now = DatetimeWithTimezone.utcnow()
        slow_created_date = now - timedelta(minutes=app.config['OLD_JOB_AGE'])
        not_started_created_date = now - timedelta(
            minutes=app.config['SLOW_STARTING_AGE']
        )
        not_started_task_date = now - timedelta(
            minutes=app.config['SLOW_STARTING_TASK_AGE']
        )
        slow_task_date = now - timedelta(minutes=app.config['SLOW_TASK_AGE'])

        def get_count(*cols: DbColumn[bool]) -> int:
            return db.session.query(models.Job).filter(
                models.Job.state.notin_(models.JobState.get_finished_states()),
                *cols,
            ).count()

        slow_jobs = get_count(models.Job.created_at < slow_created_date)

        not_starting_jobs = get_count(
            models.Job.created_at < not_started_created_date,
            models.Job.state == models.JobState.waiting_for_runner,
        )

        def as_dt(col: IndexedJSONColumn) -> DbColumn[DatetimeWithTimezone]:
            return col.as_string().cast(TIMESTAMP(timezone=True))

        not_started_task = models.Job.job_metadata['results']['not_started']
        jobs_not_starting_tasks = get_count(
            as_dt(not_started_task) < not_started_task_date
        )

        slow_task = models.Job.job_metadata['results']['running']
        jobs_with_slow_tasks = get_count(as_dt(slow_task) < slow_task_date)

        health = {
            'not_starting_jobs': not_starting_jobs,
            'slow_jobs': slow_jobs,
            'jobs_with_not_starting_tasks': jobs_not_starting_tasks,
            'jobs_with_slow_tasks': jobs_with_slow_tasks,
        }
    else:
        health = {}

    return cg_json.jsonify(
        {
            'health': health,
            'version': app.config.get('CUR_COMMIT', 'unknown'),
        },
        status_code=500 if any(health.values()) else 200,
    )
Example 18
def test_delete_code_twice(
    assignment_real_works, test_client, request, error_template, ta_user,
    logged_in, session
):
    assignment, work = assignment_real_works
    work_id = work['id']

    with logged_in(ta_user):
        res = test_client.req(
            'get',
            f'/api/v1/submissions/{work_id}/files/',
            200,
            result={
                'entries': list,
                'id': str,
                'name': str,
            }
        )
        assert len(res['entries']) == 2

        assignment.deadline = (
            DatetimeWithTimezone.utcnow() - datetime.timedelta(days=1)
        )
        session.commit()

        test_client.req(
            'delete',
            f'/api/v1/code/{res["entries"][0]["id"]}',
            204,
            result=None,
        )

        ents = test_client.req(
            'get',
            f'/api/v1/submissions/{work_id}/files/',
            200,
            query={'owner': 'teacher'},
        )['entries']

        assert len(ents) == 1, 'The teacher files should have a file less'

        test_client.req(
            'delete',
            f'/api/v1/code/{res["entries"][0]["id"]}',
            403,
            result=error_template,
        )

        ents = test_client.req(
            'get',
            f'/api/v1/submissions/{work_id}/files/',
            200,
            query={'owner': 'teacher'},
        )['entries']

        assert len(ents) == 1, 'The teacher files should have a file less'
Example 19
            def inner(*args, **kwargs):
                self.args.append(args)
                self.kwargs.append(kwargs)
                self.call_dates.append(DatetimeWithTimezone.utcnow())

                if self.with_args:
                    self.rets.append(func(*args, **kwargs))
                else:
                    self.rets.append(func())

                return self.rets[-1]
Example 20
def _create_logger(set_user: bool) -> None:
    g.request_start_time = DatetimeWithTimezone.utcnow()

    g.request_id = uuid.uuid4()
    log = logger.new(
        request_id=str(g.request_id),
        path=request.path,
        view=getattr(request.url_rule, 'rule', None),
        base_url=flask.current_app.config.get('EXTERNAL_URL'),
    )

    if set_user:
        flask_jwt.verify_jwt_in_request_optional()
        log.bind(current_user=flask_jwt.current_user
                 and flask_jwt.current_user.username)

    func = log.info
    try:
        start = DatetimeWithTimezone.utcfromtimestamp(
            float(request.headers['X-Request-Start-Time']))
        wait_time = (g.request_start_time - start).total_seconds()
        if wait_time > 5:
            func = log.error
        elif wait_time > 1:
            func = log.warning
        log.bind(time_spend_in_queue=wait_time)
    except:  # pylint: disable=bare-except
        pass

    try:
        func(
            "Request started",
            host=request.host_url,
            method=request.method,
            query_args={
                k: '<PASSWORD>' if k == 'password' else v
                for k, v in request.args.items()
            },
        )
    finally:
        log.try_unbind('time_spend_in_queue')
Example 21
    def _get_see_as_running_job(self) -> bool:
        if self.state.is_running:
            return True

        now = DatetimeWithTimezone.utcnow()
        grace_period = Setting.get(PossibleSetting.assigned_grace_period)
        if (
            self.state.is_assigned and
            (self.updated_at - now) < timedelta(seconds=grace_period)
        ):
            return True
        return False
Example 22
def get_file(file_name: str,
             name: str = 'export') -> werkzeug.wrappers.Response:
    """Serve some specific file in the uploads folder.

    .. :quickref: File; Get an uploaded file directly.

    .. note::
        Only files uploaded using :http:post:`/api/v1/files/` may be retrieved.

    :param str file_name: The filename of the file to get.
    :returns: The requested file.

    :raises PermissionException: If there is no logged in user. (NOT_LOGGED_IN)
    """
    name = request.args.get('name', name)

    directory = app.config['MIRROR_UPLOAD_DIR']
    error = False

    @callback_after_this_request
    def __delete_file() -> None:
        # Make sure we don't delete when receiving HEAD requests
        if request.method == 'GET' and not error:
            filename = safe_join(directory, file_name)
            os.unlink(filename)

    try:
        full_path = files.safe_join(directory, file_name)
        if os.path.isfile(full_path):
            mtime = os.path.getmtime(full_path)
            age = (
                get_request_start_time() -
                DatetimeWithTimezone.utcfromtimestamp(mtime)
            )
            if age > _MAX_AGE:
                raise NotFound

        mimetype = request.args.get('mime', None)
        as_attachment = request.args.get('not_as_attachment', False)
        return send_from_directory(
            directory,
            file_name,
            attachment_filename=name,
            as_attachment=as_attachment,
            mimetype=mimetype,
            cache_timeout=-1,
        )
    except NotFound:
        error = True
        raise APIException(
            'The specified file was not found',
            f'The file with name "{file_name}" was not found or is deleted.',
            APICodes.OBJECT_NOT_FOUND,
            404,
        )
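
The exact route is not visible in this snippet, so the path below is an assumption based on the docstring's reference to ``/api/v1/files/``. Fetching an uploaded file with the ``test_client.req`` helper used in the other examples would then look roughly like this:

# Sketch only: the route is an assumption; the ``name`` query parameter sets
# the download filename and ``mime`` the served mimetype, per the code above.
test_client.req(
    'get',
    f'/api/v1/files/{file_name}',
    200,
    query={'name': 'export.pdf', 'mime': 'application/pdf'},
)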
Example 23
    def _get_see_as_running_job_expr(cls: t.Type['Runner']) -> DbColumn[bool]:
        # pylint: disable=no-self-argument
        now = DatetimeWithTimezone.utcnow()
        grace_period = Setting.get(PossibleSetting.assigned_grace_period)

        return expression.or_(
            cls.state == RunnerState.running,
            expression.and_(
                cls.state == RunnerState.assigned,
                (cls.updated_at - now) < timedelta(seconds=grace_period),
            ),
        )
Example 24
    def __transform_to_datetime(self, value: str) -> DatetimeWithTimezone:
        try:
            parsed = dateutil.parser.isoparse(value)
        except (ValueError, OverflowError) as exc:
            raise SimpleParseError(
                self,
                value,
                extra={
                    'message': "which can't be parsed as a valid datetime",
                },
            ) from exc
        else:
            return DatetimeWithTimezone.from_datetime(
                parsed, default_tz=datetime.timezone.utc)
Example 25
def test_error_when_no_cookies(test_client, describe, logged_in, admin_user):
    with describe('setup'), logged_in(admin_user):
        lti_assig_id = str(uuid.uuid4())
        lti_course_id = str(uuid.uuid4())
        lms = 'Canvas'
        provider = helpers.create_lti1p3_provider(
            test_client,
            lms,
            iss='https://canvas.instructure.com',
            client_id=str(uuid.uuid4()) + '_lms=' + lms)
        data = make_launch_data(CANVAS_DATA, provider, {
            'Assignment.id': lti_assig_id,
            'Course.id': lti_course_id
        })

        def assert_is_cookie_error(response):
            assert response['message'] == "Couldn't set needed cookies"
            assert response['code'] == 'LTI1_3_ERROR'
            assert response['original_exception'][
                'code'] == 'LTI1_3_COOKIE_ERROR'

    with describe('error when no cookies are present at all'):
        oidc = do_oidc_login(test_client, provider)
        test_client.cookie_jar.clear()
        response = do_lti_launch(test_client, provider, data, oidc, 400)
        assert_is_cookie_error(response)

    with describe('error when old launch cookies are present'):
        oidc = do_oidc_login(test_client, provider)
        old_jar = copy.copy(test_client.cookie_jar)
        test_client.cookie_jar.clear()

        with freezegun.freeze_time(DatetimeWithTimezone.utcnow() +
                                   timedelta(hours=1)):
            oidc = do_oidc_login(test_client, provider)
            test_client.cookie_jar = old_jar
            response = do_lti_launch(test_client, provider, data, oidc, 400)
            assert_is_cookie_error(response)

    with describe('error when cookie has bogus value'):
        oidc = do_oidc_login(test_client, provider)
        all_cookies = list(test_client.cookie_jar)
        test_client.cookie_jar.clear()
        for cook in all_cookies:
            cook.value = 'HAHA BOGUS VALUE'
            test_client.cookie_jar.set_cookie(cook)

        response = do_lti_launch(test_client, provider, data, oidc, 400)
        assert_is_cookie_error(response)
Example 26
    def make_unassigned(self) -> None:
        """Make this runner unassigned.

        .. note::

            This also starts a job to kill this runner after a certain amount
            of time if it is still unassigned.
        """
        self.job_id = None
        eta = DatetimeWithTimezone.utcnow() + timedelta(
            minutes=app.config['RUNNER_MAX_TIME_ALIVE'])
        runner_hex_id = self.id.hex

        callback_after_this_request(
            lambda: cg_broker.tasks.maybe_kill_unneeded_runner.apply_async(
                (runner_hex_id, ),
                eta=eta,
            ))
Example 27
    def set_cookie(self, name: str, value: str, exp: int = 60) -> None:
        """Set a cookie named ``name`` to the given ``value``.

        This doesn't actually set the cookie yet; you need to use
        :meth:`.CookieService.update_response` to actually set the cookies on a
        response.

        :param name: The name of the cookie to set.
        :param value: The value of the cookie.
        :param exp: The number of seconds until the cookie expires.

        :returns: Nothing.
        """
        self._cookie_data_to_set.append(
            FlaskCookieService._CookieData(
                key=self._get_key(name),
                value=value,
                exp=DatetimeWithTimezone.utcnow() + timedelta(seconds=exp),
            ))
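
Combined with ``clear_cookie`` from example 9, the intended usage is roughly the following; how the ``FlaskCookieService`` instance is obtained is not shown in these snippets, so the instance and the cookie names below are assumptions.

# Sketch: queue one cookie to be set for five minutes and one to be cleared.
# Nothing is written to the response until ``update_response`` (mentioned in
# the docstring above) is called, which is not shown here.
cookie_service.set_cookie('lti_launch_blob', blob_id, exp=5 * 60)
cookie_service.clear_cookie('old_lti_launch_blob')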
Example 28
def _run_autotest_batch_runs_1() -> None:
    now = DatetimeWithTimezone.utcnow()
    # Limit the number of runs; this way we never accidentally overload the
    # server by doing a large number of batch runs at once.
    max_runs = p.site_settings.Opt.AUTO_TEST_MAX_CONCURRENT_BATCH_RUNS.value

    runs = p.models.AutoTestRun.query.join(
        p.models.AutoTestRun.auto_test).join(p.models.Assignment).filter(
            p.models.AutoTestRun.batch_run_done.is_(False),
            p.models.Assignment.deadline < now,
        ).options(contains_eager(p.models.AutoTestRun.auto_test)).order_by(
            p.models.Assignment.deadline).with_for_update().limit(
                max_runs).all()

    logger.info('Running batch run', run_ids=[r.id for r in runs])

    for run in runs:
        run.do_batch_run()

    p.models.db.session.commit()
Example 29
def create_lti_assignment(session,
                          course,
                          state='hidden',
                          deadline='tomorrow'):
    name = f'__NEW_LTI_ASSIGNMENT__-{uuid.uuid4()}'

    if deadline == 'tomorrow':
        deadline = DatetimeWithTimezone.utcnow() + datetime.timedelta(days=1)

    res = m.Assignment(
        name=name,
        course=course,
        deadline=deadline,
        lti_assignment_id=str(uuid.uuid4()),
        is_lti=True,
    )
    res.lti_grade_service_data = str(uuid.uuid4())
    res.set_state_with_string(state)
    session.add(res)
    session.commit()
    return res
Example 30
def _delete_file_at_time_1(
    filename: str, in_mirror_dir: bool, deletion_time: str
) -> None:
    if current_task.maybe_delay_task(
        DatetimeWithTimezone.fromisoformat(deletion_time)
    ):
        return

    if in_mirror_dir:
        root = p.app.config['MIRROR_UPLOAD_DIR']
    else:  # pragma: no cover
        # The case outside of the mirror_upload_dir is not yet used
        root = p.app.config['UPLOAD_DIR']

    filename = p.files.safe_join(root, filename)
    if os.path.isfile(filename):
        # There is a race condition here (the file could be removed in this
        # small window), but we don't care, as it is already gone in that case.
        try:
            os.unlink(filename)
        except FileNotFoundError:  # pragma: no cover
            pass