def run_get_progress_regex(proj, build_id, run):
    """Return the "console-progress" section of a run's definition.

    Raises a 404 ApiError when the run definition does not declare one.
    """
    run_obj = _get_run(proj, build_id, run)
    rundef = json.loads(Storage().get_run_definition(run_obj))
    progress = rundef.get("console-progress")
    if not progress:
        raise ApiError(404, {"message": "Run has not defined console-progress"})
    return jsendify(progress)
def promoted_build_get(proj, name):
    """Look up a single PROMOTED build by project and promotion name (404 if absent)."""
    query = Build.query.join(Project).filter(
        Project.name == proj,
        Build.status == BuildStatus.PROMOTED,
        Build.name == name,
    )
    build = get_or_404(query)
    return jsendify({'build': _promoted_as_json(Storage(), build)})
def _poll(entries: Dict[int, PollerEntry]):
    """Reconcile the in-memory poller table with the server's trigger list,
    then poll every known project for new git refs.

    Any failure talking to the JobServ is logged and the cycle is skipped.
    """
    try:
        triggers = _get_project_triggers()
        if triggers is None:
            return
    except Exception:
        logging.exception("Unable to get project list from JobServ")
        return

    wanted = set(triggers.keys())
    current = set(entries.keys())

    # Drop projects that no longer exist upstream.
    for name in current - wanted:
        log.info("Removing %s from poller list", name)
        del entries[name]
    # Start tracking newly-added projects.
    for name in wanted - current:
        log.info("Adding %s to poller list", name)
        entries[name] = PollerEntry(trigger=triggers[name])
    # Refresh triggers that changed upstream.
    for name in wanted & current:
        if entries[name].trigger != triggers[name]:
            log.info("Updating %s", name)
            entries[name].trigger = triggers[name]

    with Storage().git_poller_cache() as refs_cache:
        for entry in entries.values():
            log.debug("Checking project: %s %d",
                      entry.trigger.project, entry.trigger.id)
            projdef = _get_projdef(entry)
            proj_refs = refs_cache.setdefault(str(entry.trigger.id), {})
            if projdef:
                _poll_project(proj_refs, entry)
def promoted_build_list(proj):
    """Paginate the project's PROMOTED builds, newest first."""
    project = get_or_404(Project.query.filter_by(name=proj))
    query = (Build.query
             .filter(Build.proj_id == project.id)
             .filter(Build.status == BuildStatus.PROMOTED)
             .order_by(Build.id.desc()))
    storage = Storage()
    return paginate_custom(
        'builds', query, lambda build: _promoted_as_json(storage, build))
def run_get_artifact(proj, build_id, run, path):
    """Serve a run artifact, or stream the live console.log for an
    in-progress run.

    Completed runs: .html artifacts are returned inline so static sites
    render in the browser; everything else is a storage download/redirect.
    In-progress runs: only "console.log" is available, with X-OFFSET
    support so clients can tail incrementally.
    """
    r = _get_run(proj, build_id, run)

    if r.complete:
        storage = Storage()
        if path.endswith(".html"):
            # we are probably trying to render a static site like a build of
            # ltd-docs. Return its content rather than a redirect so it will
            # render in the browser
            body = storage.get_artifact_content(r, path)
            return body, 200, {"Content-Type": "text/html"}
        resp = storage.get_download_response(request, r, path)
        resp.headers["X-RUN-STATUS"] = r.status.name
        return resp

    if path != "console.log":
        raise ApiError(
            404, {"message": "Run in progress, no artifacts available"})

    if r.status == BuildStatus.QUEUED:
        msg = "# Waiting for worker with tag: " + r.host_tag
        headers = {
            "Content-Type": "text/plain",
            "X-RUN-STATUS": r.status.name,
        }
        return (msg, 200, headers)

    try:
        fd = Storage().console_logfd(r, "rb")
        offset = request.headers.get("X-OFFSET")
        if offset:
            offset = int(offset)
            end = fd.seek(0, 2)
            if offset >= end:
                # Client is already caught up - nothing new to send.
                headers = {
                    "Content-Type": "text/html",
                    "X-RUN-STATUS": r.status.name,
                }
                return (b"", 200, headers)
            fd.seek(offset, 0)
        resp = make_response(send_file(fd, mimetype="text/plain"))
        resp.headers["X-RUN-STATUS"] = r.status.name
        return resp
    except FileNotFoundError:
        # This is a race condition. The run completed while we were checking
        return Storage().get_download_response(request, r, path)
def run_get(proj, build_id, run):
    """Return a detailed JSON view of a run, including artifact URLs."""
    run_obj = _get_run(proj, build_id, run)
    data = run_obj.as_json(detailed=True)
    data['artifacts'] = [
        url_for('api_run.run_get_artifact', proj=proj, build_id=build_id,
                run=run, path=artifact, _external=True)
        for artifact in Storage().list_artifacts(run_obj)
    ]
    return jsendify({'run': data})
def run_upload(proj, build_id, run):
    """Give an authenticated runner signed upload URLs for its artifacts.

    The request body is a JSON list/dict of artifact paths; the optional
    X-URL-EXPIRATION header overrides the signed-URL lifetime.
    """
    r = _get_run(proj, build_id, run)
    _authenticate_runner(r)
    data = request.get_json()
    urls = {}
    if data:
        # determine url expiration, default 1800 = 30 minutes.
        # Header values arrive as strings while the default is an int, so
        # normalize to int before handing it to the storage backend.
        expiration = int(request.headers.get('X-URL-EXPIRATION', 1800))
        urls = Storage().generate_signed(r, data, expiration)
    return jsendify({'urls': urls})
def test_get_stream(self, storage):
    """A RUNNING run's console.log is served as text/plain."""
    run = Run(self.build, "run0")
    run.status = BuildStatus.RUNNING
    db.session.add(run)
    db.session.commit()

    with Storage().console_logfd(run, "ab") as log:
        log.write(b"this is the message")

    resp = self.client.get(self.urlbase + "run0/console.log")
    self.assertEqual(200, resp.status_code)
    self.assertEqual("text/plain", resp.mimetype)
def test_get_stream(self, storage):
    """A RUNNING run's console.log is served as text/plain."""
    run = Run(self.build, 'run0')
    run.status = BuildStatus.RUNNING
    db.session.add(run)
    db.session.commit()

    with Storage().console_logfd(run, 'ab') as log:
        log.write(b'this is the message')

    resp = self.client.get(self.urlbase + 'run0/console.log')
    self.assertEqual(200, resp.status_code)
    self.assertEqual('text/plain', resp.mimetype)
def trigger_build(project, reason, trigger_name, params, secrets, proj_def,
                  queue_priority=0):
    """Create a Build for `project` and launch the runs of `trigger_name`.

    Any failure while preparing the build is converted into a
    "build-failure" run via _fail_unexpected and re-raised as an ApiError.
    Returns the committed Build.
    """
    proj_def = ProjectDefinition.validate_data(proj_def)
    build = Build.create(project)
    try:
        if reason:
            build.reason = reason
        if trigger_name:
            build.trigger_name = trigger_name
        storage = Storage()
        storage.create_project_definition(
            build, yaml.dump(proj_def._data, default_flow_style=False))
        trigger = proj_def.get_trigger(trigger_name)
        if not trigger:
            raise KeyError("Project(%s) does not have a trigger: %s"
                           % (project, trigger_name))
        if trigger.get("triggers"):
            # there's a trigger to run after all the runs for this trigger
            # completed. it will need to know the parameters for this job
            storage.create_build_params(build, params)
    except Exception as e:
        raise _fail_unexpected(build, e)
    trigger_runs(storage, proj_def, build, trigger, params, secrets, None,
                 queue_priority)
    db.session.commit()
    return build
def backup(keep_local=False):
    """Dump the MySQL database and upload it to the storage 'backups/' area.

    :param keep_local: when False (default), the local dump file is
        deleted after the upload succeeds.
    """
    # NOTE(review): the credentials here were scrubbed to ``******`` in the
    # original (which is not valid Python); reconstructed from the SQLAlchemy
    # engine URL - confirm against deployment config.
    url = db.engine.url
    command = ('mysqldump',
               '--user=' + url.username,
               '--password=' + url.password,
               '--host=' + url.host,
               '--single-transaction',
               url.database)
    backup_path = '/data/jobserv-db.sql-%s' % datetime.datetime.now()
    with open(backup_path, 'w') as f:
        subprocess.check_call(command, stdout=f)
    Storage()._create_from_file('backups/' + os.path.basename(backup_path),
                                backup_path, 'application/x-sql')
    if not keep_local:
        os.unlink(backup_path)
def test_run_stream(self, storage):
    """POSTing console data stores the log and updates the run status."""
    run = Run(self.build, 'run0')
    db.session.add(run)
    db.session.commit()

    headers = [
        ('Authorization', 'Token %s' % run.api_key),
        ('X-RUN-STATUS', 'RUNNING'),
    ]
    self._post(self.urlbase + 'run0/', 'message', headers, 200)

    with Storage().console_logfd(run, 'r') as log:
        self.assertEqual('message', log.read())
    db.session.refresh(run)
    self.assertEqual('RUNNING', run.status.name)
def _fail_unexpected(build, exception):
    """Record an internal error as a failed 'build-failure' run.

    Writes the exception text to the run's console log, and returns a 500
    ApiError whose Location header points at that log for debugging.
    """
    run = Run(build, 'build-failure')
    db.session.add(run)
    run.set_status(BuildStatus.FAILED)
    db.session.commit()

    storage = Storage()
    with storage.console_logfd(run, 'a') as log:
        log.write('Unexpected error prevented build from running:\n')
        log.write(str(exception))
    storage.copy_log(run)

    if BUILD_URL_FMT:
        url = BUILD_URL_FMT.format(
            project=build.project.name, build=build.build_id)
    else:
        url = url_for('api_run.run_get_artifact', proj=build.project.name,
                      build_id=build.build_id, run=run.name,
                      path='console.log')
    err = ApiError(500, str(exception))
    err.resp.headers.extend({'Location': url})
    return err
def test_run_stream(self, storage):
    """POSTing console data stores the log and updates the run status."""
    run = Run(self.build, "run0")
    db.session.add(run)
    db.session.commit()

    headers = [
        ("Authorization", "Token %s" % run.api_key),
        ("X-RUN-STATUS", "RUNNING"),
    ]
    self._post(self.urlbase + "run0/", "message", headers, 200)

    with Storage().console_logfd(run, "r") as log:
        self.assertEqual("message", log.read())
    db.session.refresh(run)
    self.assertEqual("RUNNING", run.status.name)
def backup(keep_local=False):
    """Dump the MySQL database and upload it to the storage 'backups/' area.

    :param keep_local: when False (default), the local dump file is
        deleted after the upload succeeds.
    """
    # NOTE(review): the credentials here were scrubbed to ``******`` in the
    # original (which is not valid Python); reconstructed from the SQLAlchemy
    # engine URL - confirm against deployment config.
    url = db.engine.url
    command = (
        "mysqldump",
        "--user=" + url.username,
        "--password=" + url.password,
        "--host=" + url.host,
        "--single-transaction",
        url.database,
    )
    backup_path = "/data/jobserv-db.sql-%s" % datetime.datetime.now()
    with open(backup_path, "w") as f:
        subprocess.check_call(command, stdout=f)
    Storage()._create_from_file("backups/" + os.path.basename(backup_path),
                                backup_path, "application/x-sql")
    if not keep_local:
        os.unlink(backup_path)
def run_get_artifact(proj, build_id, run, path):
    """Serve a run artifact, or stream the live console.log for an
    in-progress run.

    Completed runs: .html artifacts are returned inline so static sites
    render in the browser; everything else is a storage download/redirect.
    In-progress runs: only "console.log" is available, with X-OFFSET
    support so clients can tail incrementally.
    """
    r = _get_run(proj, build_id, run)
    if r.complete:
        storage = Storage()
        if path.endswith('.html'):
            # we are probably trying to render a static site like a build of
            # ltd-docs. Return its content rather than a redirect so it will
            # render in the browser
            content = storage.get_artifact_content(r, path)
            return content, 200, {'Content-Type': 'text/html'}
        resp = storage.get_download_response(request, r, path)
        resp.headers['X-RUN-STATUS'] = r.status.name
        return resp
    if path != 'console.log':
        raise ApiError(
            404, {'message': 'Run in progress, no artifacts available'})
    if r.status == BuildStatus.QUEUED:
        msg = '# Waiting for worker with tag: ' + r.host_tag
        return (msg, 200, {
            'Content-Type': 'text/plain',
            'X-RUN-STATUS': r.status.name
        })
    try:
        fd = Storage().console_logfd(r, 'rb')
        offset = request.headers.get('X-OFFSET')
        if offset:
            offset = int(offset)
            # If the client is already caught up, return an empty body
            # instead of send_file on an EOF-seeked descriptor.
            end = fd.seek(0, 2)
            if offset >= end:
                return (
                    b'', 200, {
                        'Content-Type': 'text/html',
                        'X-RUN-STATUS': r.status.name
                    },
                )
            fd.seek(offset, 0)
        resp = make_response(send_file(fd, mimetype='text/plain'))
        resp.headers['X-RUN-STATUS'] = r.status.name
        return resp
    except FileNotFoundError:
        # This is a race condition. The run completed while we were checking
        return Storage().get_download_response(request, r, path)
def build_get_project_definition(proj, build_id):
    """Return the raw YAML project definition stored for a build."""
    project = get_or_404(Project.query.filter(Project.name == proj))
    build = get_or_404(Build.query.filter(
        Build.project == project, Build.build_id == build_id))
    yaml_text = Storage().get_project_definition(build)
    return yaml_text, 200, {"Content-Type": "text/yaml"}