def post(self, project_id):
    '''
    Create a new auth token for a project.

    Expects a JSON body with 'description', 'scope_push' and 'scope_pull'.
    Returns the encoded project token on success; 400 on an unknown
    project id, malformed body, or duplicate description.
    '''
    project = g.db.execute_one(
        '''
        SELECT name
        FROM project
        WHERE id = %s
        ''', [project_id])

    if not project:
        return abort(400, 'Invalid project id.')

    # execute_one returns a row; the name is its first column (the other
    # handlers in this file index the identical query with [0]).
    project_name = project[0]

    b = request.get_json()

    # Reject malformed requests explicitly instead of letting a
    # KeyError/TypeError surface as an internal server error.
    if not b or 'description' not in b or \
            'scope_push' not in b or 'scope_pull' not in b:
        return abort(400, 'Invalid request body.')

    result = g.db.execute_one(
        '''
        SELECT COUNT(*)
        FROM auth_token
        WHERE project_id = %s
        AND description = %s
        ''', [project_id, b['description']])[0]

    if result != 0:
        return abort(400, 'Token with such a description already exists.')

    result = g.db.execute_one_dict(
        '''
        INSERT INTO auth_token (description, scope_push, scope_pull, project_id)
        VALUES (%s, %s, %s, %s) RETURNING id
        ''', [b['description'], b['scope_push'], b['scope_pull'], project_id])

    token_id = result['id']
    token = encode_project_token(token_id, project_id, project_name)

    g.db.commit()

    return OK('Successfully added token.', {'token': token})
def setUp(self):
    """Reset the database to a known fixture state before each test."""
    conn = connect_db()
    cur = conn.cursor()

    # Wipe every table so each test starts from a clean slate; order
    # matters because of foreign-key references.
    tables = ('job', 'auth_token', 'collaborator', 'project', '"user"',
              'source_upload', 'build', 'test_run', 'measurement',
              'job_markup', 'secret')
    for table in tables:
        cur.execute('DELETE FROM %s' % table)

    # Seed one user owning one upload project with a valid token and secret.
    cur.execute('''INSERT INTO "user"(id, github_id, avatar_url, name, email, github_api_token, username) VALUES(%s, 1, 'avatar', 'name', 'email', 'token', 'login')''', (self.user_id,))
    cur.execute('''INSERT INTO project(name, type, id, public) VALUES('test', 'upload', %s, true)''', (self.project_id,))
    cur.execute('''INSERT INTO collaborator(project_id, user_id, role) VALUES(%s, %s, 'Owner')''', (self.project_id, self.user_id,))
    cur.execute('''INSERT INTO auth_token(project_id, id, description, scope_push, scope_pull) VALUES(%s, %s, 'asd', true, true)''', (self.project_id, self.token_id,))
    cur.execute('''INSERT INTO secret(project_id, name, value) VALUES(%s, 'SECRET_ENV', %s)''', (self.project_id, encrypt_secret('hello world')))
    conn.commit()

    # Make the encoded token available to CLI-driven tests.
    os.environ['INFRABOX_CLI_TOKEN'] = encode_project_token(
        self.token_id, self.project_id, 'myproject')
    self.root_url = os.environ['INFRABOX_ROOT_URL']
def get_project_headers(self, project_token=None):  # pragma: no cover
    """
    Build a Basic-auth header dict for project-token authentication.

    :param project_token: token id to encode; defaults to self.project_token
    :return: dict with an 'Authorization' header
    """
    if not project_token:
        project_token = self.project_token

    token = encode_project_token(project_token, self.project_id, 'myproject')

    # b64encode operates on bytes: passing a str raises TypeError on
    # Python 3 (and interpolating bytes yields "b'...'"). Encoding the
    # input and decoding the result works on both Python 2 and 3.
    auth = base64.b64encode(('infrabox:%s' % token).encode('utf-8')).decode('ascii')
    h = {'Authorization': 'Basic %s' % auth}
    return h
def post(self, project_id):
    """Create a new auth token for the project and return the encoded token."""
    body = request.get_json()

    inserted = g.db.execute_one_dict(
        '''
        INSERT INTO auth_token (description, scope_push, scope_pull, project_id)
        VALUES (%s, %s, %s, %s) RETURNING id
        ''',
        [body['description'], body['scope_push'],
         body['scope_pull'], project_id])

    # Encode the freshly inserted token id into a signed project token.
    token = encode_project_token(inserted['id'], project_id)

    g.db.commit()

    return OK('Successfully added token', {'token': token})
def _get_headers(self):
    """
    Return Basic-auth headers built from this object's project token.

    :return: dict with an 'authorization' header
    """
    token = encode_project_token(self.token, self.project_id)

    # b64encode needs bytes; encode/decode keeps this correct on both
    # Python 2 and Python 3 (a bare str raises TypeError on 3.x).
    auth = base64.b64encode(('infrabox:%s' % token).encode('utf-8')).decode('ascii')
    headers = {'authorization': "Basic " + auth}
    return headers
def get(self, url, password='******'):
    """
    GET *url* authenticated with a project token derived from *password*.

    :param url: absolute URL to request
    :param password: token id encoded into the project token
    :return: the requests.Response
    """
    token = encode_project_token(
        password, '2514af82-3c4f-4bb5-b1da-a89a0ced5e6f')

    # b64encode needs bytes; encode/decode keeps this correct on both
    # Python 2 and Python 3 (a bare str raises TypeError on 3.x).
    auth = base64.b64encode(('infrabox:%s' % token).encode('utf-8')).decode('ascii')
    headers = {'authorization': "Basic " + auth}
    return requests.get(url, headers=headers)
def get_project_authorization(token_id, project_id):  # pragma: no cover
    """Build a token-auth header dict for the given project token id."""
    encoded = encode_project_token(token_id, project_id, 'myproject')
    return {'Authorization': 'token %s' % encoded}
def post(self, project_id):
    """
    Start a build for an 'upload'-type project from an uploaded zip.

    Stores the uploaded archive, replicates it to all other active and
    enabled clusters, creates the build plus its job-matrix job, and
    returns the build id/number and a dashboard URL.

    :param project_id: id of the project to build
    :raises 404: project not found
    :raises 400: project is not of type 'upload'
    :raises 500: replication to another cluster failed
    """
    project = g.db.execute_one_dict(
        '''
        SELECT type
        FROM project
        WHERE id = %s
        ''', [project_id])

    if not project:
        abort(404, 'Project not found')

    if project['type'] != 'upload':
        abort(400, 'Project is not of type "upload"')

    # The build id doubles as the storage key for the uploaded archive.
    build_id = str(uuid.uuid4())
    key = '%s.zip' % build_id

    stream = request.files['project.zip'].stream
    storage.upload_project(stream, key)

    # Every other active+enabled cluster must also receive the archive.
    clusters = g.db.execute_many_dict(
        '''
        SELECT root_url
        FROM cluster
        WHERE active = true
        AND enabled = true
        AND name != %s
        ''', [os.environ['INFRABOX_CLUSTER_NAME']])

    for c in clusters:
        # Rewind: the stream was consumed by the upload above (and by
        # each previous loop iteration).
        stream.seek(0)
        url = '%s/api/v1/projects/%s/upload/%s/' % (
            c['root_url'], project_id, build_id)
        files = {'project.zip': stream}
        token = encode_project_token(g.token['id'], project_id, 'myproject')
        headers = {'Authorization': 'bearer ' + token}
        logger.info('Also uploading to %s', url)

        # TODO(ib-steffen): allow custom ca bundles
        r = requests.post(url,
                          files=files,
                          headers=headers,
                          timeout=120,
                          verify=False)

        if r.status_code != 200:
            abort(500, "Failed to upload data")

    # Next free build number for this project.
    build_number = g.db.execute_one_dict(
        '''
        SELECT count(distinct build_number) + 1 AS build_number
        FROM build AS b
        WHERE b.project_id = %s
        ''', [project_id])['build_number']

    source_upload_id = g.db.execute_one(
        '''
        INSERT INTO source_upload(filename, project_id, filesize)
        VALUES (%s, %s, 0) RETURNING ID
        ''', [key, project_id])[0]

    g.db.execute(
        '''
        INSERT INTO build (commit_id, build_number, project_id, source_upload_id, id)
        VALUES (null, %s, %s, %s, %s)
        ''', [build_number, project_id, source_upload_id, build_id])

    # Default resource limits for the job-matrix job.
    definition = {
        'build_only': False,
        'resources': {
            'limits': {
                'cpu': 0.5,
                'memory': 1024
            }
        }
    }

    # Queue the job that expands the uploaded definition into real jobs;
    # cluster_name is NULL so the scheduler may pick any cluster.
    g.db.execute(
        '''
        INSERT INTO job (id, state, build_id, type, name, project_id,
                         dockerfile, definition, cluster_name)
        VALUES (gen_random_uuid(), 'queued', %s, 'create_job_matrix',
                'Create Jobs', %s, '', %s, %s);
        ''', [build_id, project_id, json.dumps(definition), None])

    project_name = g.db.execute_one(
        '''
        SELECT name
        FROM project
        WHERE id = %s
        ''', [project_id])[0]

    root_url = get_root_url('global')
    url = '%s/dashboard/#/project/%s/build/%s/1' % (
        root_url, project_name, build_number)

    data = {
        'build': {
            'id': build_id,
            'number': build_number
        },
        'url': url
    }

    # Single commit at the end: build, source upload and job land atomically.
    g.db.commit()

    return OK('successfully started build', data=data)
def get_project_authorization(user_id, project_id):  # pragma: no cover
    """Build a token-auth header for *project_id* on behalf of *user_id*."""
    # NOTE(review): the encoded user token is passed where a token id is
    # expected by encode_project_token — presumably intentional for these
    # tests; confirm against encode_project_token's signature.
    project_token = encode_project_token(encode_user_token(user_id), project_id)
    return {'Authorization': 'token %s' % project_token}
def post(self, project_id):
    """
    Start a build for an 'upload'-type project from an uploaded zip.

    Stores the uploaded archive, replicates it to all active non-master
    clusters, creates the build plus its job-matrix job, and returns the
    build id/number and a dashboard URL.

    :param project_id: id of the project to build
    :raises 404: project not found
    :raises 400: project is not of type 'upload'
    :raises 500: replication to another cluster failed
    """
    project = g.db.execute_one_dict(
        '''
        SELECT type
        FROM project
        WHERE id = %s
        ''', [project_id])

    if not project:
        abort(404, 'Project not found')

    if project['type'] != 'upload':
        abort(400, 'Project is not of type "upload"')

    # The build id doubles as the storage key for the uploaded archive.
    build_id = str(uuid.uuid4())
    key = '%s.zip' % build_id

    stream = request.files['project.zip'].stream
    storage.upload_project(stream, key)

    # Every active cluster except master must also receive the archive.
    clusters = g.db.execute_many_dict('''
        SELECT root_url
        FROM cluster
        WHERE active = true
        AND name != 'master'
    ''')

    for c in clusters:
        # Rewind: the stream was consumed by the upload above (and by
        # each previous loop iteration).
        stream.seek(0)
        url = '%s/api/v1/projects/%s/upload/%s/' % (
            c['root_url'], project_id, build_id)
        files = {'project.zip': stream}
        token = encode_project_token(g.token['id'], project_id)
        headers = {'Authorization': 'bearer ' + token}
        logger.info('Also uploading to %s', url)
        r = requests.post(url, files=files, headers=headers, timeout=120)

        if r.status_code != 200:
            abort(500, "Failed to upload data")

    # Next free build number for this project.
    build_number = g.db.execute_one_dict(
        '''
        SELECT count(distinct build_number) + 1 AS build_number
        FROM build AS b
        WHERE b.project_id = %s
        ''', [project_id])['build_number']

    source_upload_id = g.db.execute_one(
        '''
        INSERT INTO source_upload(filename, project_id, filesize)
        VALUES (%s, %s, 0) RETURNING ID
        ''', [key, project_id])[0]

    g.db.execute(
        '''
        INSERT INTO build (commit_id, build_number, project_id, source_upload_id, id)
        VALUES (null, %s, %s, %s, %s)
        ''', [build_number, project_id, source_upload_id, build_id])

    # Queue the job that expands the uploaded definition into real jobs.
    g.db.execute(
        '''
        INSERT INTO job (id, state, build_id, type, name, project_id,
                         dockerfile, build_only, cpu, memory)
        VALUES (gen_random_uuid(), 'queued', %s, 'create_job_matrix',
                'Create Jobs', %s, '', false, 1, 1024);
        ''', [build_id, project_id])

    project_name = g.db.execute_one(
        '''
        SELECT name
        FROM project
        WHERE id = %s
        ''', [project_id])[0]

    url = '%s/dashboard/#/project/%s/build/%s/1' % (
        os.environ['INFRABOX_ROOT_URL'], project_name, build_number)

    data = {
        'build': {
            'id': build_id,
            'number': build_number
        },
        'url': url
    }

    # Single commit at the end: build, source upload and job land atomically.
    g.db.commit()

    return OK('successfully started build', data=data)