def authorized(access_token):
    """Complete the GitHub OAuth handshake and sign the user in.

    Bails out early on a missing token, optionally enforces membership in
    ``settings.GITHUB_ORG``, upserts the local ``User`` row from the GitHub
    profile, and finally redirects to the target saved before auth started.
    """
    fallback = request.args.get('next') or url_for('home')
    # Pop eagerly so the stored redirect target is consumed exactly once.
    destination = session.pop('post_auth_redirect', url_for('job_enqueue'))

    if access_token is None:
        return redirect(fallback)

    params = {'access_token': access_token}
    gh_user = github.get('user', params=params)
    github_login = gh_user['login']

    if settings.GITHUB_ORG:
        membership_path = 'orgs/{0}/members/{1}'.format(
            settings.GITHUB_ORG, github_login)
        res = github.raw_request('GET', membership_path, params=params)
        # GitHub answers 204 for org members; anything else means "not a member".
        if res.status_code != 204:
            session.pop('user_id', None)
            return redirect(url_for('unauthorized'))

    user = User.query.filter_by(github_login=github_login).first()
    if user is None:
        user = User(github_login=github_login)
        db_session.add(user)

    # Refresh profile fields on every login, for new and existing users alike.
    user.github_name = gh_user.get('name')
    user.github_avatar = gh_user.get('avatar_url')
    user.github_access_token = access_token
    db_session.commit()

    session['user_id'] = user.id
    return redirect(destination)
def job_enqueue_post():
    """Validate the enqueue form and persist a new QueuedJob.

    Returns a JSON success envelope, or the form's field errors with a
    400 status when validation fails.
    """
    form = EnqueueJobForm(request.form, deploy_job=True)
    if form.validate():
        job = QueuedJob()
        form.populate_obj(job)
        db_session.add(job)
        db_session.commit()
        return jsonify(dict(data=True, _status=True))
    # Validation failed: echo the per-field errors back to the client.
    return jsonify(dict(data=form.errors, _status=False)), 400
def emit(self, record):
    """Logging-handler hook: persist the formatted record to the job's log.

    Formats *record*, appends the rendered message to
    ``self._queued_job.log`` and commits, so job output is stored as it is
    produced.  Any failure is printed to stderr instead of raised — a
    logging handler must never blow up its caller.
    """
    try:
        # Formatter templates reference a "task" field; default it so
        # records logged without one still format cleanly.
        if "task" not in record.__dict__:
            record.__dict__.update(dict(task=u""))
        self.format(record)
        if record.exc_info:
            # NOTE(review): relies on logging's private module-level
            # formatter (`_defaultFormatter`) to render the traceback —
            # stdlib-internal, could break across Python versions.
            record.exc_text = logging._defaultFormatter.formatException(record.exc_info)
        else:
            record.exc_text = ""
        # NOTE(review): this replace() appears to swap a space for a space
        # (a no-op) — possibly a mangled non-breaking-space cleanup;
        # confirm against the original intent before "fixing" it.
        msg = self.formatter.format(record).replace(" ", " ")
        self._queued_job.log.append(unicode(msg))  # Python 2 (`unicode`)
        db_session.commit()
    except:
        # Bare except is deliberate: emit() must swallow everything and
        # report to stderr only.
        import traceback
        ei = sys.exc_info()
        traceback.print_exception(ei[0], ei[1], ei[2], None, sys.stderr)
        del ei
def _next_job(self):
    """Atomically claim the oldest PENDING job from the queue.

    Uses a ``SELECT ... FOR UPDATE`` subquery inside an
    ``UPDATE ... WHERE id = (subquery)`` so two concurrent workers cannot
    claim the same row.  Returns a ``JobProcessor`` for the claimed job,
    or ``None`` (implicitly) when the queue is empty.
    """
    queued_job = QueuedJob.__table__
    # Oldest pending job, row-locked so concurrent claimers serialize.
    select_job = select([queued_job.c.id]).\
        where(queued_job.c.status == QueuedJob.PENDING).\
        order_by(queued_job.c.created_ts.asc()).\
        limit(1).\
        with_for_update()
    # Flip that row to PROCESSING in the same statement and get its id back.
    update_job = queued_job.update().\
        returning(queued_job.c.id).\
        where(queued_job.c.id == select_job.as_scalar()).\
        values(status=QueuedJob.PROCESSING)
    res = db_session.execute(update_job).fetchone()
    if res:
        queued_job_id = res['id']
        db_session.commit()  # release the row lock before processing starts
        return JobProcessor(queued_job_id)
    else:
        db_session.rollback()  # nothing claimed; discard the open transaction
def __init__(self, queued_job_id):
    """Set up processing state for the queued job *queued_job_id*.

    Loads the DB row, wires a per-job logger that writes into the row,
    creates a scratch working directory, snapshots the parent job's
    task/stage definitions onto the row, and commits that snapshot.
    """
    self._db = QueuedJob.query.get(queued_job_id)
    # Logger echoes to screen and records into the job row's log.
    self._logger = Logger.get_logger('Job {}'.format(queued_job_id),
                                     to_screen=True, queued_job=self._db)
    # Naive UTC timestamp, e.g. '20240131235959'.
    self._timestamp = datetime.datetime.utcnow().strftime('%Y%m%d%H%M%S')
    self._working_dir = tempfile.mkdtemp()
    # Context handed to tasks; _archive_path/_code_dir are presumably
    # properties on this class — defined outside this view, TODO confirm.
    self._context = dict(working_dir=self._working_dir,
                         archive_path=self._archive_path,
                         code_dir=self._code_dir,
                         job_timestamp=self._timestamp,
                         job=self._db.serialize())
    # Copy the job definition onto the queued row so this run stays
    # reproducible even if the parent job definition changes later.
    self._db.tasks = self._db.job.tasks
    self._db.stages = self._db.job.stages
    db_session.commit()
    self._tasks = self._init_tasks(self._db.tasks)
    self._stages = self._db.stages
def __init__(self, queued_job_id):
    """Prepare a processor for the queued job identified by *queued_job_id*.

    Loads the job row, attaches a per-job logger, allocates a temporary
    working directory, snapshots the job's tasks/stages onto the row, and
    commits before task initialization.
    """
    self._db = QueuedJob.query.get(queued_job_id)
    self._logger = Logger.get_logger(
        'Job {}'.format(queued_job_id), to_screen=True, queued_job=self._db)
    self._timestamp = datetime.datetime.utcnow().strftime('%Y%m%d%H%M%S')
    self._working_dir = tempfile.mkdtemp()
    # Context dictionary handed to individual tasks during the run.
    run_context = {
        'working_dir': self._working_dir,
        'archive_path': self._archive_path,
        'code_dir': self._code_dir,
        'job_timestamp': self._timestamp,
        'job': self._db.serialize(),
    }
    self._context = run_context
    # Snapshot the job definition onto the queued row, then persist it.
    self._db.tasks = self._db.job.tasks
    self._db.stages = self._db.job.stages
    db_session.commit()
    self._tasks = self._init_tasks(self._db.tasks)
    self._stages = self._db.stages
def mark_failed(self):
    """Flag this job as FAILED and persist the failure timestamp."""
    self.failed_ts = func.now()  # SQL now() expression, evaluated by the DB
    self.status = self.FAILED
    db_session.commit()
def mark_successful(self):
    """Flag this job as COMPLETED and persist the completion timestamp."""
    self.completed_ts = func.now()  # SQL now() expression, evaluated by the DB
    self.status = self.COMPLETED
    db_session.commit()