def get(self):
    job_id = g.token['job']['id']
    project_id = g.token['project']['id']

    # Look up the source upload for the build this job belongs to; the
    # project_id checks guard against cross-project access.
    r = g.db.execute_one('''
        SELECT su.filename
        FROM source_upload su
        INNER JOIN build b
            ON b.source_upload_id = su.id
        INNER JOIN job j
            ON j.build_id = b.id
        WHERE j.id = %s
            AND b.project_id = %s
            AND j.project_id = %s
    ''', [job_id, project_id, project_id])

    filename = r[0]
    filename = filename.replace('/', '_')

    g.release_db()

    f = storage.download_source(filename)

    if not f:
        abort(404)

    return send_file(f)
def get(self, parent_job_id):
    job_id = g.token['job']['id']

    if not validate_uuid4(parent_job_id):
        abort(400, "Invalid uuid")

    dependencies = g.db.execute_one('''
        SELECT dependencies
        FROM job
        WHERE id = %s
    ''', [job_id])[0]

    # Only direct parents of the requesting job may be downloaded.
    is_valid_dependency = False
    for dep in dependencies:
        if dep['job-id'] == parent_job_id:
            is_valid_dependency = True
            break

    if not is_valid_dependency:
        abort(404, "Job not found")

    key = "%s.tar.gz" % parent_job_id
    key = key.replace('/', '_')

    g.release_db()

    f = storage.download_output(key)

    if not f:
        abort(404)

    return send_file(f)
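# validate_uuid4 is defined elsewhere in the codebase; a minimal sketch of
# what it could look like (an assumption, not the project's actual helper):
import uuid

def validate_uuid4(value):
    try:
        parsed = uuid.UUID(value, version=4)
    except (ValueError, AttributeError, TypeError):
        return False
    # Reject strings that only parse because UUID() normalizes them.
    return parsed.hex == value.replace('-', '')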
def get(self, parent_job_id):
    job_id = g.token['job']['id']

    if not validate_uuid4(parent_job_id):
        abort(400, "Invalid uuid")

    filename = request.args.get('filename', None)
    if not filename:
        abort(400, "Invalid filename")

    dependencies = g.db.execute_one('''
        SELECT dependencies
        FROM job
        WHERE id = %s
    ''', [job_id])[0]

    is_valid_dependency = False
    for dep in dependencies:
        if dep['job-id'] == parent_job_id:
            is_valid_dependency = True
            break

    if not is_valid_dependency:
        abort(404, "Job not found")

    key = "%s/%s" % (parent_job_id, filename)

    g.release_db()

    f = storage.download_output(key)

    if not f:
        abort(404)

    return send_file(f)
def post(self):
    job_id = g.token['job']['id']

    key = "%s.tar.gz" % job_id
    key = key.replace('/', '_')

    g.release_db()

    storage.upload_output(request.files['output.tar.gz'].stream, key)

    return jsonify({})
def post(self):
    job_id = g.token['job']['id']

    key = "%s.tar.gz" % job_id
    key = key.replace('/', '_')

    stream = request.files['output.tar.gz'].stream

    # Determine all still-queued children of this job, so the output can be
    # replicated to the clusters they will run on.
    jobs = g.db.execute_many_dict('''
        SELECT cluster_name, dependencies
        FROM job
        WHERE build_id = (SELECT build_id FROM job WHERE id = %s)
            AND state = 'queued'
    ''', [job_id])

    clusters = set()

    for j in jobs:
        dependencies = j.get('dependencies', None)

        if not dependencies:
            continue

        for dep in dependencies:
            if dep['job-id'] != job_id:
                continue

            clusters.add(j['cluster_name'])

    clusters = g.db.execute_many_dict('''
        SELECT root_url
        FROM cluster
        WHERE active = true
            AND name = ANY (%s)
            AND name != %s
    ''', [list(clusters), os.environ['INFRABOX_CLUSTER_NAME']])

    g.release_db()

    storage.upload_output(stream, key)

    # Forward the output to every other active cluster with a queued child.
    for c in clusters:
        stream.seek(0)
        url = '%s/api/job/output' % c['root_url']
        files = {'output.tar.gz': stream}
        token = encode_job_token(job_id)
        headers = {'Authorization': 'bearer ' + token}
        r = requests.post(url, files=files, headers=headers, timeout=120)

        if r.status_code != 200:
            abort(500, "Failed to upload data")

    return jsonify({})
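# Hedged usage sketch: how a client (e.g. the job runner) might call the
# upload endpoint above. The URL, multipart field name and header format are
# taken from the forwarding code in the handler; the function itself is
# illustrative, not the actual runner code.
import requests

def upload_job_output(api_url, job_token, tar_path):
    with open(tar_path, 'rb') as f:
        r = requests.post('%s/api/job/output' % api_url,
                          files={'output.tar.gz': f},
                          headers={'Authorization': 'bearer ' + job_token},
                          timeout=120)
    if r.status_code != 200:
        raise Exception('upload failed: %s' % r.text)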
def get(self, project_id, job_id):
    g.release_db()

    key = '%s.tar.gz' % job_id

    f = storage.download_output(key)

    if not f:
        abort(404)

    return send_file(f, attachment_filename=key)
def post(self):
    project_id = g.token['project']['id']
    job_name = g.token['job']['name']

    template = 'project_%s_job_%s.tar.gz'
    key = template % (project_id, job_name)
    key = key.replace('/', '_')

    g.release_db()

    storage.upload_cache(request.files['cache.tar.gz'].stream, key)

    return jsonify({})
def get(self, project_id, job_id):
    '''
    Returns the content of /infrabox/output of the job
    '''
    g.release_db()

    key = '%s.tar.snappy' % job_id

    f = storage.download_output(key)

    if not f:
        abort(404)

    return send_file(f, attachment_filename=key)
def post(self):
    g.release_db()

    project_id = g.token['project']['id']
    job_name = g.token['job']['name']

    for f, _ in request.files.items():
        template = 'project_%s_job_%s_%s'
        key = template % (project_id, job_name, f)
        key = key.replace('/', '_')

        stream = request.files[f].stream
        storage.upload_cache(stream, key)

    return jsonify({})
def get(self):
    g.release_db()

    project_id = g.token['project']['id']
    job_name = g.token['job']['name']

    filename = request.args.get('filename', None)
    if not filename:
        # Without this check a missing parameter would silently produce a
        # key containing 'None' (validation matches the other handlers).
        abort(400, "Invalid filename")

    template = 'project_%s_job_%s_%s'
    key = template % (project_id, job_name, filename)
    key = key.replace('/', '_')

    f = storage.download_cache(key)

    if not f:
        abort(404)

    return send_file(f)
def get(self):
    project_id = g.token['project']['id']
    job_name = g.token['job']['name']

    template = 'project_%s_job_%s.tar.gz'
    key = template % (project_id, job_name)
    key = key.replace('/', '_')

    g.release_db()

    f = storage.download_cache(key)

    if not f:
        abort(404)

    return send_file(f)
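# Hedged sketch: the cache-key scheme shared by the cache handlers above,
# factored into one helper. A refactoring suggestion only, not code that
# exists in the project.
def cache_key(project_id, job_name, suffix='.tar.gz'):
    key = 'project_%s_job_%s%s' % (project_id, job_name, suffix)
    # '/' in nested job names would otherwise create pseudo-directories in
    # the object store, so it is flattened to '_'.
    return key.replace('/', '_')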
def post(self):
    job_id = g.token['job']['id']

    # Determine all still-queued children of this job once, before the
    # upload loop, so the db is not used again after release_db().
    jobs = g.db.execute_many_dict('''
        SELECT cluster_name, dependencies
        FROM job
        WHERE build_id = (SELECT build_id FROM job WHERE id = %s)
            AND state = 'queued'
    ''', [job_id])

    current_cluster = g.db.execute_one_dict('''
        SELECT cluster_name
        FROM job
        WHERE id = %s
    ''', [job_id])['cluster_name']

    clusters = set()

    for j in jobs:
        dependencies = j.get('dependencies', None)

        if not dependencies:
            continue

        for dep in dependencies:
            if dep['job-id'] != job_id:
                continue

            clusters.add(j['cluster_name'])

    clusters = g.db.execute_many_dict('''
        SELECT root_url
        FROM cluster
        WHERE active = true
            AND enabled = true
            AND name = ANY (%s)
            AND name != %s
            AND name != %s
    ''', [list(clusters), os.environ['INFRABOX_CLUSTER_NAME'], current_cluster])

    g.release_db()

    for f, _ in request.files.items():
        key = "%s/%s" % (job_id, f)
        stream = request.files[f].stream

        storage.upload_output(stream, key)

        # Forward the file to every other active cluster with a queued child.
        for c in clusters:
            stream.seek(0)
            url = '%s/api/job/output' % c['root_url']
            files = {f: stream}
            token = encode_job_token(job_id)
            headers = {'Authorization': 'bearer ' + token}
            r = requests.post(url, files=files, headers=headers,
                              timeout=120, verify=False)

            if r.status_code != 200:
                app.logger.error(r.text)
                abort(500, "Failed to upload data")

    return jsonify({})
def get(self, parent_job_id):
    job_id = g.token['job']['id']

    if not validate_uuid(parent_job_id):
        abort(400, "Invalid uuid")

    filename = request.args.get('filename', None)
    if not filename:
        abort(400, "Invalid filename")

    dependencies = g.db.execute_one('''
        SELECT dependencies
        FROM job
        WHERE id = %s
    ''', [job_id])[0]

    is_valid_dependency = False
    for dep in dependencies:
        if dep['job-id'] == parent_job_id:
            is_valid_dependency = True
            break

    if not is_valid_dependency:
        abort(404, "Job not found")

    key = "%s/%s" % (parent_job_id, filename)

    f = storage.download_output(key)

    if f:
        g.release_db()
        return send_file(f)

    # Not in local storage: ask the cluster on which the parent job ran.
    c = g.db.execute_one_dict('''
        SELECT *
        FROM cluster
        WHERE name = (
            SELECT cluster_name
            FROM job
            WHERE id = %s
        )
    ''', [parent_job_id])

    g.release_db()

    if c['name'] == os.environ['INFRABOX_CLUSTER_NAME']:
        abort(404)

    token = encode_job_token(job_id)
    headers = {'Authorization': 'token ' + token}
    url = '%s/api/job/output/%s?filename=%s' % (c['root_url'],
                                                parent_job_id, filename)

    try:
        r = requests.get(url, headers=headers, timeout=120, verify=False)

        if r.status_code != 200:
            f = None
        else:
            f = BytesIO(r.content)
            f.seek(0)
    except Exception:
        f = None

    if not f:
        abort(404)

    return send_file(f, attachment_filename=filename)
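# Hedged usage sketch: fetching a single file from a parent job's output
# through the endpoint above. Route shape, query parameter and header format
# mirror the handlers in this section; the function itself is illustrative,
# and the 'bearer' scheme is an assumption (the cross-cluster fallback above
# uses 'token').
import requests

def fetch_parent_output_file(api_url, job_token, parent_job_id, filename):
    url = '%s/api/job/output/%s' % (api_url, parent_job_id)
    r = requests.get(url,
                     params={'filename': filename},
                     headers={'Authorization': 'bearer ' + job_token},
                     timeout=120)
    if r.status_code != 200:
        return None
    return r.content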