Ejemplo n.º 1
0
    def test_source(self):
        """Round-trip test for the /source endpoint.

        Writes a small JSON file, registers it as the build's source
        upload, stores it via ``storage``, then checks that the streamed
        response has exactly the uploaded file's size.
        """
        data = {"data": "dummy_data"}
        file_name = "test_source.tmp_test_file"

        with open(file_name, "w") as source_data_file:
            json.dump(data, source_data_file)

        # Ensure the temp file is removed even if a DB call or the
        # upload raises (the original leaked it on failure).
        try:
            file_size = stat(file_name).st_size

            TestClient.execute(
                """INSERT INTO source_upload (id, project_id, filename, filesize)
                                  VALUES (%s, %s, %s, %s)
                               """,
                [self.source_upload_id, self.project_id, file_name, file_size])

            TestClient.execute(
                """UPDATE build SET source_upload_id = %s
                                  WHERE id = %s""",
                [self.source_upload_id, self.build_id])

            with open(file_name, 'r') as source_data:
                storage.upload_project(source_data, file_name)
        finally:
            remove(file_name)

        response = TestClient.get(self.url_ns + '/source', self.job_headers)
        response_size = TestUtils.get_stream_file_size(response.data)
        self.assertEqual(response_size, file_size)
Ejemplo n.º 2
0
    def post(self, project_id):
        """Start a new build for an upload-type project.

        Stores the posted ``project.zip`` stream, creates the
        source_upload/build/job rows, and returns the build id, build
        number and a dashboard URL.

        Aborts with 404 if the project does not exist and 400 if it is
        not of type "upload".
        """
        # Fetch type and name in a single round trip; the original
        # issued a second SELECT against the same row just for the name.
        project = g.db.execute_one_dict(
            '''
            SELECT type, name
            FROM project
            WHERE id = %s
        ''', [project_id])

        if not project:
            abort(404, 'Project not found')

        if project['type'] != 'upload':
            abort(400, 'Project is not of type "upload"')

        build_id = str(uuid.uuid4())
        key = '%s.zip' % build_id

        storage.upload_project(request.files['project.zip'].stream, key)

        # Next free build number for this project.
        build_number = g.db.execute_one_dict(
            '''
            SELECT count(distinct build_number) + 1 AS build_number
            FROM build AS b
            WHERE b.project_id = %s
        ''', [project_id])['build_number']

        source_upload_id = g.db.execute_one(
            '''
            INSERT INTO source_upload(filename, project_id, filesize) VALUES (%s, %s, 0) RETURNING ID
        ''', [key, project_id])[0]

        g.db.execute(
            '''
            INSERT INTO build (commit_id, build_number, project_id, source_upload_id, id)
            VALUES (null, %s, %s, %s, %s)
        ''', [build_number, project_id, source_upload_id, build_id])

        # Seed the job that expands the build definition into real jobs.
        g.db.execute(
            '''
            INSERT INTO job (id, state, build_id, type, name, project_id,
                             dockerfile, build_only, cpu, memory)
            VALUES (gen_random_uuid(), 'queued', %s, 'create_job_matrix',
                    'Create Jobs', %s, '', false, 1, 1024);
        ''', [build_id, project_id])

        url = '%s/dashboard/#/project/%s/build/%s/1' % (
            os.environ['INFRABOX_ROOT_URL'], project['name'], build_number)

        data = {'build': {'id': build_id, 'number': build_number}, 'url': url}

        g.db.commit()

        return OK('successfully started build', data=data)
Ejemplo n.º 3
0
    def post(self, project_id, build_id):
        """Accept a ``project.zip`` upload for an existing build.

        Aborts with 404 if the project is unknown and 400 if it is not
        of type "upload"; otherwise stores the stream under
        ``<build_id>.zip``.
        """
        row = g.db.execute_one_dict(
            '''
            SELECT type
            FROM project
            WHERE id = %s
        ''', [project_id])

        if not row:
            abort(404, 'Project not found')

        if row['type'] != 'upload':
            abort(400, 'Project is not of type "upload"')

        storage.upload_project(request.files['project.zip'].stream,
                               '%s.zip' % build_id)

        return OK('successfully uploaded data')
Ejemplo n.º 4
0
        def post(self, project_id):
            """Start a new build for an upload-type project and replicate
            the uploaded archive to all other active clusters.

            Stores the posted ``project.zip``, re-posts it to every
            active+enabled cluster except the local one, then creates the
            source_upload/build/job rows and returns the build id/number
            plus a dashboard URL.
            """
            # Fetch type and name in a single round trip; the original
            # issued a second SELECT on the same row just for the name.
            project = g.db.execute_one_dict(
                '''
                SELECT type, name
                FROM project
                WHERE id = %s
            ''', [project_id])

            if not project:
                abort(404, 'Project not found')

            if project['type'] != 'upload':
                abort(400, 'Project is not of type "upload"')

            build_id = str(uuid.uuid4())
            key = '%s.zip' % build_id

            stream = request.files['project.zip'].stream
            storage.upload_project(stream, key)

            # All other clusters that should receive a copy of the upload.
            clusters = g.db.execute_many_dict(
                '''
                SELECT root_url
                FROM cluster
                WHERE active = true
                AND enabled = true
                AND name != %s
            ''', [os.environ['INFRABOX_CLUSTER_NAME']])

            for c in clusters:
                # Rewind: the stream was fully consumed by the previous upload.
                stream.seek(0)
                url = '%s/api/v1/projects/%s/upload/%s/' % (
                    c['root_url'], project_id, build_id)
                files = {'project.zip': stream}
                # NOTE(review): token name is hard-coded to 'myproject' —
                # looks like a placeholder; confirm against
                # encode_project_token's contract.
                token = encode_project_token(g.token['id'], project_id,
                                             'myproject')
                headers = {'Authorization': 'bearer ' + token}
                logger.info('Also uploading to %s', url)

                # TODO(ib-steffen): allow custom ca bundles
                r = requests.post(url,
                                  files=files,
                                  headers=headers,
                                  timeout=120,
                                  verify=False)

                if r.status_code != 200:
                    abort(500, "Failed to upload data")

            # Next free build number for this project.
            build_number = g.db.execute_one_dict(
                '''
                SELECT count(distinct build_number) + 1 AS build_number
                FROM build AS b
                WHERE b.project_id = %s
            ''', [project_id])['build_number']

            source_upload_id = g.db.execute_one(
                '''
                INSERT INTO source_upload(filename, project_id, filesize) VALUES (%s, %s, 0) RETURNING ID
            ''', [key, project_id])[0]

            g.db.execute(
                '''
                INSERT INTO build (commit_id, build_number, project_id, source_upload_id, id)
            ''' + '''
                VALUES (null, %s, %s, %s, %s)
            ''', [build_number, project_id, source_upload_id, build_id])

            definition = {
                'build_only': False,
                'resources': {
                    'limits': {
                        'cpu': 0.5,
                        'memory': 1024
                    }
                }
            }

            # Seed the job that expands the build definition into real jobs.
            g.db.execute(
                '''
                INSERT INTO job (id, state, build_id, type, name, project_id,
                                 dockerfile, definition, cluster_name)
                VALUES (gen_random_uuid(), 'queued', %s, 'create_job_matrix',
                        'Create Jobs', %s, '', %s, %s);
            ''', [build_id, project_id,
                  json.dumps(definition), None])

            root_url = get_root_url('global')
            url = '%s/dashboard/#/project/%s/build/%s/1' % (
                root_url, project['name'], build_number)

            data = {
                'build': {
                    'id': build_id,
                    'number': build_number
                },
                'url': url
            }

            g.db.commit()

            return OK('successfully started build', data=data)
Ejemplo n.º 5
0
        def post(self, project_id):
            """Start a new build for an upload-type project and replicate
            the uploaded archive to the other active clusters.

            Stores the posted ``project.zip``, re-posts it to every
            active non-master cluster, then creates the
            source_upload/build/job rows and returns the build id/number
            plus a dashboard URL.
            """
            # Fetch type and name in a single round trip; the original
            # issued a second SELECT on the same row just for the name.
            project = g.db.execute_one_dict(
                '''
                SELECT type, name
                FROM project
                WHERE id = %s
            ''', [project_id])

            if not project:
                abort(404, 'Project not found')

            if project['type'] != 'upload':
                abort(400, 'Project is not of type "upload"')

            build_id = str(uuid.uuid4())
            key = '%s.zip' % build_id

            stream = request.files['project.zip'].stream
            storage.upload_project(stream, key)

            # NOTE(review): the local cluster name is hard-coded as
            # 'master' here — confirm this matches the deployment's
            # cluster naming.
            clusters = g.db.execute_many_dict('''
                SELECT root_url
                FROM cluster
                WHERE active = true
                AND name != 'master'
            ''')

            for c in clusters:
                # Rewind: the stream was fully consumed by the previous upload.
                stream.seek(0)
                url = '%s/api/v1/projects/%s/upload/%s/' % (
                    c['root_url'], project_id, build_id)
                files = {'project.zip': stream}
                token = encode_project_token(g.token['id'], project_id)
                headers = {'Authorization': 'bearer ' + token}
                logger.info('Also uploading to %s', url)
                r = requests.post(url,
                                  files=files,
                                  headers=headers,
                                  timeout=120)

                if r.status_code != 200:
                    abort(500, "Failed to upload data")

            # Next free build number for this project.
            build_number = g.db.execute_one_dict(
                '''
                SELECT count(distinct build_number) + 1 AS build_number
                FROM build AS b
                WHERE b.project_id = %s
            ''', [project_id])['build_number']

            source_upload_id = g.db.execute_one(
                '''
                INSERT INTO source_upload(filename, project_id, filesize) VALUES (%s, %s, 0) RETURNING ID
            ''', [key, project_id])[0]

            g.db.execute(
                '''
                INSERT INTO build (commit_id, build_number, project_id, source_upload_id, id)
                VALUES (null, %s, %s, %s, %s)
            ''', [build_number, project_id, source_upload_id, build_id])

            # Seed the job that expands the build definition into real jobs.
            g.db.execute(
                '''
                INSERT INTO job (id, state, build_id, type, name, project_id,
                                 dockerfile, build_only, cpu, memory)
                VALUES (gen_random_uuid(), 'queued', %s, 'create_job_matrix',
                        'Create Jobs', %s, '', false, 1, 1024);
            ''', [build_id, project_id])

            url = '%s/dashboard/#/project/%s/build/%s/1' % (
                os.environ['INFRABOX_ROOT_URL'], project['name'], build_number)

            data = {
                'build': {
                    'id': build_id,
                    'number': build_number
                },
                'url': url
            }

            g.db.commit()

            return OK('successfully started build', data=data)