def clear_cookie(self, name: str) -> None:
    """Schedule the cookie ``name`` to be cleared on the client.

    Clearing is implemented by queueing a replacement cookie with an
    empty value whose expiry date is the Unix epoch, which makes the
    browser drop it when the response is sent.

    :param name: The (un-prefixed) name of the cookie to clear.
    :returns: Nothing.
    """
    epoch = DatetimeWithTimezone.utcfromtimestamp(0)
    cookie = FlaskCookieService._CookieData(
        key=self._get_key(name),
        value='',
        exp=epoch,
    )
    self._cookie_data_to_set.append(cookie)
def get_file(file_name: str, name: str = 'export') -> werkzeug.wrappers.Response:
    """Serve some specific file in the uploads folder.

    .. :quickref: File; Get an uploaded file directory.

    .. note::
        Only files uploaded using :http:post:`/api/v1/files/` may be
        retrieved.

    :param str file_name: The filename of the file to get.
    :returns: The requested file.

    :raises PermissionException: If there is no logged in user. (NOT_LOGGED_IN)
    """
    # The download name may be overridden per-request via ``?name=``.
    name = request.args.get('name', name)
    directory = app.config['MIRROR_UPLOAD_DIR']
    # Read by the ``__delete_file`` closure below: when the request errors
    # we keep the file on disk instead of deleting it.
    error = False

    @callback_after_this_request
    def __delete_file() -> None:
        # Make sure we don't delete when receiving HEAD requests
        if request.method == 'GET' and not error:
            # NOTE(review): this uses the bare ``safe_join`` while the code
            # below uses ``files.safe_join`` — presumably both resolve to
            # the same helper; verify the imports at the top of the file.
            filename = safe_join(directory, file_name)
            os.unlink(filename)

    try:
        full_path = files.safe_join(directory, file_name)
        if os.path.isfile(full_path):
            # Uploaded files are one-shot downloads with a limited
            # lifetime; anything older than ``_MAX_AGE`` is treated as if
            # it no longer exists.
            mtime = os.path.getmtime(full_path)
            age = get_request_start_time(
            ) - DatetimeWithTimezone.utcfromtimestamp(mtime)
            if age > _MAX_AGE:
                raise NotFound
            mimetype = request.args.get('mime', None)
            # NOTE(review): the query parameter is named
            # ``not_as_attachment`` but its (truthy string) value is used
            # directly as ``as_attachment`` without negation — confirm
            # whether this inversion is intentional.
            as_attachment = request.args.get('not_as_attachment', False)
            return send_from_directory(
                directory,
                file_name,
                attachment_filename=name,
                as_attachment=as_attachment,
                mimetype=mimetype,
                cache_timeout=-1,
            )
        # NOTE(review): when the path exists but is not a file the
        # function falls through here and implicitly returns ``None`` —
        # confirm a ``raise NotFound`` is not missing.
    except NotFound:
        error = True
        raise APIException(
            'The specified file was not found',
            f'The file with name "{file_name}" was not found or is deleted.',
            APICodes.OBJECT_NOT_FOUND,
            404,
        )
def _create_logger(set_user: bool) -> None:
    """Set up the per-request structured logger and log the request start.

    Stores the request start time and a fresh request id on ``flask.g``,
    binds request metadata onto a new logger, and emits a "Request
    started" event. When the ``X-Request-Start-Time`` header is present
    the time spent queued before reaching the app is logged too, at a
    severity that grows with the wait.

    :param set_user: Whether to resolve the (optional) JWT and bind the
        current user's username onto the logger.
    :returns: Nothing.
    """
    g.request_start_time = DatetimeWithTimezone.utcnow()
    g.request_id = uuid.uuid4()
    log = logger.new(
        request_id=str(g.request_id),
        path=request.path,
        view=getattr(request.url_rule, 'rule', None),
        base_url=flask.current_app.config.get('EXTERNAL_URL'),
    )

    if set_user:
        flask_jwt.verify_jwt_in_request_optional()
        log.bind(
            current_user=flask_jwt.current_user and
            flask_jwt.current_user.username
        )

    func = log.info
    try:
        start = DatetimeWithTimezone.utcfromtimestamp(
            float(request.headers['X-Request-Start-Time'])
        )
        wait_time = (g.request_start_time - start).total_seconds()
        # BUG fix: the original checked ``> 5`` first and then
        # unconditionally re-checked ``> 1``, so waits over five seconds
        # were always downgraded to a warning and ``log.error`` was
        # unreachable. Using ``elif`` restores the intended escalation.
        if wait_time > 5:
            func = log.error
        elif wait_time > 1:
            func = log.warning
        log.bind(time_spend_in_queue=wait_time)
    except Exception:  # pylint: disable=broad-except
        # Best effort: the header is optional and may be malformed, in
        # which case we simply skip the queue-time bookkeeping.
        pass

    try:
        func(
            "Request started",
            host=request.host_url,
            method=request.method,
            query_args={
                k: '<PASSWORD>' if k == 'password' else v
                for k, v in request.args.items()
            },
        )
    finally:
        log.try_unbind('time_spend_in_queue')
def creation_time(self) -> DatetimeWithTimezone:
    """Return the modification time of the underlying path as a
    timezone-aware UTC datetime.
    """
    return DatetimeWithTimezone.utcfromtimestamp(
        os_path.getmtime(self._path)
    )
def test_check_heartbeat(
    session, describe, monkeypatch, stub_function_class, app,
    assignment_real_works, monkeypatch_celery
):
    """Check the heartbeat watchdog for AutoTest runners.

    While the heartbeat is fresh the watchdog should only reschedule
    itself; once it expires the runner should be torn down, the job id
    rotated, and only non-final results cleared. A check for an unknown
    runner id should be a no-op.
    """
    assignment, submission = assignment_real_works
    sub2_id = m.Work.query.filter_by(assignment_id=assignment.id).filter(
        m.Work.id != submission['id']
    ).first().id

    with describe('setup'):
        stub_heart = stub_function_class()
        monkeypatch.setattr(t, 'check_heartbeat_auto_test_run', stub_heart)

        stub_notify_new = stub_function_class()
        monkeypatch.setattr(t, '_notify_broker_of_new_job_1', stub_notify_new)

        # Wrap (not replace) the real implementation so the teardown
        # side effects asserted below actually happen.
        orig_kill_and_adjust = t._kill_runners_and_adjust_1
        kill_and_adjust = stub_function_class(orig_kill_and_adjust)
        monkeypatch.setattr(t, 'kill_runners_and_adjust', kill_and_adjust)

        stub_notify_stop = stub_function_class()
        monkeypatch.setattr(t, 'notify_broker_end_of_job', stub_notify_stop)

        stub_notify_kill_single = stub_function_class()
        monkeypatch.setattr(
            t, 'notify_broker_kill_single_runner', stub_notify_kill_single
        )

        test = m.AutoTest(
            setup_script='', run_setup_script='', assignment=assignment
        )
        run = m.AutoTestRun(
            _job_id=uuid.uuid4(), auto_test=test, batch_run_done=False
        )
        run.results = [
            m.AutoTestResult(work_id=sub2_id, final_result=False),
            m.AutoTestResult(work_id=submission['id'], final_result=False)
        ]
        # One in-flight result and one already-passed result, so we can
        # verify that only the unfinished one gets reset.
        run.results[0].state = m.AutoTestStepResultState.running
        run.results[1].state = m.AutoTestStepResultState.passed
        session.add(run)

        with app.test_request_context('/non_existing', {}):
            run.add_active_runner('localhost')
        runner = run.runners[0]
        run.results[0].runner = runner
        run.results[1].runner = runner
        with app.test_request_context('/non_existing', {}):
            run.add_active_runner('localhost2')
        assert runner.run
        session.commit()

    with describe('not expired'):
        t._check_heartbeat_stop_test_runner_1.delay(runner.id.hex)
        now = DatetimeWithTimezone.utcnow()

        # As the heartbeats have not expired yet a new check should be
        # scheduled
        assert len(stub_heart.all_args) == 1
        assert stub_heart.all_args[0][0] == (runner.id.hex, )
        assert (stub_heart.all_args[0]['eta'] - now).total_seconds() > 0
        assert not stub_notify_new.called
        assert not kill_and_adjust.called
        assert not stub_notify_stop.all_args
        assert not stub_notify_kill_single.all_args

    with describe('expired'):
        runner.last_heartbeat = DatetimeWithTimezone.utcfromtimestamp(0)
        old_job_id = run.get_job_id()
        session.commit()
        run = runner.run

        t._check_heartbeat_stop_test_runner_1.delay(runner.id.hex)

        assert not stub_heart.called
        assert len(stub_notify_new.all_args) == 0
        assert len(kill_and_adjust.args) == 1
        assert len(stub_notify_stop.all_args) == 0
        assert len(stub_notify_kill_single.all_args) == 0
        # BUG fix: this comparison was a bare expression (no ``assert``),
        # so the job-id rotation was never actually verified.
        assert run.get_job_id() != old_job_id
        assert runner.run is None
        assert (run.results[0].state == m.AutoTestStepResultState.not_started
                ), 'The results should be cleared'
        assert (run.results[1].state == m.AutoTestStepResultState.passed
                ), 'Passed results should not be cleared'

    with describe('With non existing runner'):
        t._check_heartbeat_stop_test_runner_1.delay(uuid.uuid4().hex)
        assert not stub_heart.called
        assert not stub_notify_new.called
        assert not stub_notify_stop.called
        assert not stub_notify_kill_single.called
def _clone_commit_as_submission_1(
    unix_timestamp: float,
    clone_data_as_dict: t.Dict[str, t.Any],
) -> None:
    """Clone a repository and create a submission from its files.

    .. warning::

      This function **does not** check if the user has permission to create a
      submission, so this should be done by the caller.

    :param unix_timestamp: The date the submission should be created at. The
        rationale for passing this is that a user might have permission to
        create a submission at this time, but not at the time of the commit
        (or vica versa). And as commit times cannot be trusted this should be
        given explicitly.
    :param clone_data_as_dict: A :class:`p.models.GitCloneData` as a
        dictionary, including private data.
    :returns: Nothing.
    """
    clone_data = p.models.GitCloneData(**clone_data_as_dict)

    webhook = p.models.WebhookBase.query.get(clone_data.webhook_id)
    if webhook is None:
        # The webhook may have been deleted between enqueueing and running
        # this task; best effort, so just log and bail out.
        logger.warning('Could not find webhook')
        return

    # Lock the assignment row so concurrent submissions for the same
    # assignment cannot interleave with this one.
    assignment = p.models.Assignment.query.filter_by(
        id=webhook.assignment_id
    ).with_for_update().one()

    created_at = DatetimeWithTimezone.utcfromtimestamp(unix_timestamp)

    # The private deploy key only exists on disk for the duration of the
    # clone; the checkout itself goes into a throwaway directory.
    with webhook.written_private_key() as fname, tempfile.TemporaryDirectory(
    ) as tmpdir:
        ssh_username = webhook.ssh_username
        assert ssh_username is not None
        program = p.helpers.format_list(
            p.current_app.config['GIT_CLONE_PROGRAM'],
            clone_url=clone_data.clone_url,
            commit=clone_data.commit,
            out_dir=tmpdir,
            ssh_key=fname,
            ssh_username=ssh_username,
            git_branch=clone_data.branch,
        )
        success, output = p.helpers.call_external(program)
        logger.info(
            'Called external clone program',
            successful=success,
            command_output=output
        )
        if not success:
            # Clone failed; nothing to submit. The output was already
            # logged above.
            return

        # Symlinks in the checkout could point outside the tree; replace
        # them before ingesting the files.
        p.archive.Archive.replace_symlinks(tmpdir)

        tree = p.extract_tree.ExtractFileTree(
            values=p.files.rename_directory_structure(
                tmpdir, p.app.max_file_size
            ).values,
            name=clone_data.repository_name.replace('/', ' - '),
            parent=None
        )

        logger.info('Creating submission')
        work = p.models.Work.create_from_tree(
            assignment, webhook.user, tree, created_at=created_at
        )
        work.origin = p.models.WorkOrigin[clone_data.type]
        work.extra_info = clone_data.get_extra_info()
        p.models.db.session.commit()