Example #1
0
    def post(self):
        """Receive the job's output archive upload and persist it to storage."""
        job_id = g.token['job']['id']

        # Slashes in the job id would create nested storage paths; flatten them.
        safe_key = ("%s.tar.gz" % job_id).replace('/', '_')

        storage.upload_output(request.files['output.tar.gz'].stream, safe_key)
        return jsonify({})
Example #2
0
    def test_get_job_output(self):
        """Upload an output file for a job, fetch it back via the API and
        verify the served payload has the same size as the uploaded file.

        Fix: temp files are now removed in ``finally`` blocks so a failing
        upload or a failing assertion no longer leaves stale files behind
        that could break subsequent test runs.
        """
        job_data = {
            "version": 1,
            "subject": "subject_val",
            "status": "status_val1",
            "color": "green"
        }

        file_name = "test_get_job_output.tmp_test_file.json"
        try:
            with open(file_name, 'w') as job_data_file:
                # Serialize the fixture so there is a real file to upload.
                json.dump(job_data, job_data_file)

            file_size = stat(file_name).st_size
            with open(file_name, 'r') as job_data_file:
                storage.upload_output(stream=job_data_file,
                                      key=self.job_id + '.tar.gz')
        finally:
            # Clean up even if the upload raised.
            remove(file_name)

        res = TestClient.get(
            '/api/v1/projects/%s/jobs/%s/output' %
            (self.project_id, self.job_id),
            TestClient.get_project_authorization(self.user_id,
                                                 self.project_id))

        out_name = file_name + '.out_test'
        try:
            with open(out_name, "wb") as output_file:
                output_file.write(res.data)
                # tell() right after the write gives the byte count written.
                out_size = output_file.tell()
        finally:
            remove(out_name)

        self.assertEqual(out_size, file_size)
Example #3
0
    def post(self):
        """Store an uploaded job output archive and fan it out to other clusters.

        The archive is saved to local storage, then forwarded to every active
        cluster (other than this one) that hosts a queued child job which
        depends on the uploading job.
        """
        job_id = g.token['job']['id']

        # Slashes in the job id would break the storage key; flatten them.
        key = ("%s.tar.gz" % job_id).replace('/', '_')

        stream = request.files['output.tar.gz'].stream

        # Queued jobs in the same build are the candidate children.
        candidate_jobs = g.db.execute_many_dict(
            '''
            SELECT cluster_name, dependencies
            FROM job
            WHERE build_id = (SELECT build_id FROM job WHERE id = %s)
            AND state = 'queued'
        ''', [job_id])

        # Collect the cluster of every child that depends on this job.
        target_names = set()
        for child in candidate_jobs:
            for dep in child.get('dependencies', None) or []:
                if dep['job-id'] == job_id:
                    target_names.add(child['cluster_name'])

        # Resolve cluster names to root URLs, excluding our own cluster.
        targets = g.db.execute_many_dict(
            '''
            SELECT root_url
            FROM cluster
            WHERE active = true
            AND name = ANY (%s)
            AND name != %s
        ''', [list(target_names), os.environ['INFRABOX_CLUSTER_NAME']])

        g.release_db()

        storage.upload_output(stream, key)

        for target in targets:
            # Rewind so each remote cluster receives the archive from byte 0.
            stream.seek(0)
            url = '%s/api/job/output' % target['root_url']
            token = encode_job_token(job_id)
            headers = {'Authorization': 'bearer ' + token}
            r = requests.post(url,
                              files={'output.tar.gz': stream},
                              headers=headers,
                              timeout=120)

            if r.status_code != 200:
                abort(500, "Failed to upload data")

        return jsonify({})
Example #4
0
    def post(self):
        """Store every uploaded job file and replicate it to dependent clusters.

        For each file in the request, the payload is written to local storage
        and forwarded to every other active+enabled cluster that hosts a
        queued child job depending on this job.

        Fixes:
        - the original returned from inside the per-file loop, so only the
          FIRST uploaded file was ever processed;
        - the DB queries and ``g.release_db()`` ran inside that loop, so the
          second iteration would have used an already-released DB handle;
          the cluster fan-out depends only on the job, so it is computed once.
        """
        job_id = g.token['job']['id']

        # Determine all queued children of this job's build.
        jobs = g.db.execute_many_dict('''
            SELECT cluster_name, dependencies
            FROM job
            WHERE build_id = (SELECT build_id FROM job WHERE id = %s)
            AND state = 'queued'
        ''', [job_id])

        current_cluster = g.db.execute_one_dict('''
            SELECT cluster_name
            FROM job
            WHERE id = %s
        ''', [job_id])['cluster_name']

        # Clusters of queued children that actually depend on this job.
        cluster_names = set()
        for j in jobs:
            dependencies = j.get('dependencies', None)
            if not dependencies:
                continue
            for dep in dependencies:
                if dep['job-id'] != job_id:
                    continue
                cluster_names.add(j['cluster_name'])

        clusters = g.db.execute_many_dict('''
            SELECT root_url
            FROM cluster
            WHERE active = true
            AND enabled = true
            AND name = ANY (%s)
            AND name != %s
            AND name != %s
        ''', [list(cluster_names), os.environ['INFRABOX_CLUSTER_NAME'], current_cluster])

        # Release the DB before the potentially slow uploads start.
        g.release_db()

        for f in request.files:
            key = "%s/%s" % (job_id, f)
            stream = request.files[f].stream

            storage.upload_output(stream, key)

            for c in clusters:
                # Rewind so every remote cluster receives the full file.
                stream.seek(0)
                url = '%s/api/job/output' % c['root_url']
                files = {f: stream}
                token = encode_job_token(job_id)
                headers = {'Authorization': 'bearer ' + token}
                # NOTE(security): verify=False disables TLS certificate
                # verification for inter-cluster traffic — review whether a
                # trusted CA bundle can be used instead.
                r = requests.post(url, files=files, headers=headers, timeout=120, verify=False)

                if r.status_code != 200:
                    app.logger.error(r.text)
                    abort(500, "Failed to upload data")

        return jsonify({})