Example #1
    def post(self, project_id, build_id):
        '''
        Upload and trigger build
        '''
        # look up the project to make sure it exists and accepts uploads
        project = g.db.execute_one_dict('''
            SELECT type
            FROM project
            WHERE id = %s
        ''', [project_id])

        if not project:
            abort(404, 'Project not found')

        if project['type'] != 'upload':
            abort(400, 'Project is not of type "upload"')

        # store the uploaded archive once per build; re-uploads are ignored
        key = '%s.zip' % build_id
        if not storage.exists(key):
            stream = request.files['project.zip'].stream
            storage.upload_project(stream, key)

        return OK('successfully uploaded data')
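
A minimal client-side sketch of calling this handler. The base URL, route and token below are placeholders and not taken from the code; only the multipart field name 'project.zip', which the handler reads explicitly, is.

    # Hypothetical upload client: API base URL, route and token are assumptions.
    import requests

    API = 'https://infrabox.example.com/api/v1'   # assumed base URL
    project_id = '<project uuid>'
    build_id = '<build uuid>'

    with open('project.zip', 'rb') as fh:
        r = requests.post(
            '%s/projects/%s/builds/%s/upload' % (API, project_id, build_id),  # assumed route
            files={'project.zip': fh},   # field name expected by the handler
            headers={'Authorization': 'bearer <token>'},
            timeout=120)
    r.raise_for_status()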
Example #2
    def post(self):
        # the job id comes from the verified job token, not from the request
        job_id = g.token['job']['id']

        for f, _ in request.files.items():
            # each uploaded file is stored under "<job_id>/<field name>"
            key = "%s/%s" % (job_id, f)

            stream = request.files[f].stream

            # fetch all queued jobs of the same build; the children of this
            # job are filtered below via their dependencies
            jobs = g.db.execute_many_dict(
                '''
                SELECT cluster_name, dependencies
                FROM job
                WHERE build_id = (SELECT build_id FROM job WHERE id = %s)
                AND state = 'queued'
            ''', [job_id])

            # cluster this job ran on
            current_cluster = g.db.execute_one_dict(
                '''
                SELECT cluster_name
                FROM job
                WHERE id = %s
            ''', [job_id])['cluster_name']

            # collect the clusters of all queued jobs which depend on this job
            clusters = set()

            for j in jobs:
                dependencies = j.get('dependencies', None)

                if not dependencies:
                    continue

                for dep in dependencies:
                    if dep['job-id'] != job_id:
                        continue

                    clusters.add(j['cluster_name'])

            # resolve the cluster names to root URLs, skipping the local
            # cluster and the cluster the job ran on
            clusters = g.db.execute_many_dict(
                '''
                SELECT root_url
                FROM cluster
                WHERE active = true
                AND enabled = true
                AND name = ANY (%s)
                AND name != %s
                AND name != %s
            ''', [
                    list(clusters), os.environ['INFRABOX_CLUSTER_NAME'],
                    current_cluster
                ])

            # release the DB connection before doing storage and network I/O
            g.release_db()

            # store the output once; if the key already exists it is kept
            if not storage.exists(key):
                storage.upload_output(stream, key)

            # forward the output to every other cluster which needs it
            for c in clusters:
                stream.seek(0)
                url = '%s/api/job/output' % c['root_url']
                files = {f: stream}
                token = encode_job_token(job_id)
                headers = {'Authorization': 'bearer ' + token}
                r = requests.post(url,
                                  files=files,
                                  headers=headers,
                                  timeout=120,
                                  verify=False)

                if r.status_code != 200:
                    app.logger.error(r.text)
                    abort(500, "Failed to upload data")

            # note: the return sits inside the outer loop, so only the first
            # uploaded file is handled
            return jsonify({})
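
A minimal sketch of a job posting its output to this handler. The root URL and token are placeholders; the '/api/job/output' path and the bearer-token header mirror the forwarding code above, and the handler stores the file under "<job_id>/<field name>".

    # Hypothetical output upload from inside a job: root URL and token are assumptions.
    import requests

    root_url = 'https://infrabox.example.com'   # assumed cluster root URL
    job_token = '<job token>'                   # token issued to the job

    with open('output.tar.gz', 'rb') as fh:
        r = requests.post('%s/api/job/output' % root_url,
                          files={'output.tar.gz': fh},
                          headers={'Authorization': 'bearer ' + job_token},
                          timeout=120)
    r.raise_for_status()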