def upload_component_file(user, c_id):
    """Attach an uploaded file to component *c_id* (admin only).

    Streams the request body to the 'components' store, then records the
    file's metadata (md5/mime/size as reported by the store) in the
    COMPONENT_FILES table.

    :raises auth.UNAUTHORIZED: when the caller is not an admin
    """
    if not auth.is_admin(user):
        raise auth.UNAUTHORIZED
    COMPONENT_FILES = models.COMPONENT_FILES
    component = v1_utils.verify_existence_and_get(c_id, _TABLE)

    # Fix: the store was fetched twice; one instance is enough for both
    # building the path and uploading.
    swift = dci_config.get_store('components')

    file_id = utils.gen_uuid()
    file_path = swift.build_file_path(component['topic_id'], c_id, file_id)

    swift.upload(file_path, flask.request.stream)
    # head() returns the metadata the store computed during the upload.
    s_file = swift.head(file_path)

    values = dict.fromkeys(['md5', 'mime', 'component_id', 'name'])
    values.update({
        'id': file_id,
        'component_id': c_id,
        # no user-supplied name here: the generated uuid doubles as name
        'name': file_id,
        'created_at': datetime.datetime.utcnow().isoformat(),
        'md5': s_file['etag'],
        'mime': s_file['content-type'],
        'size': s_file['content-length']
    })

    query = COMPONENT_FILES.insert().values(**values)
    flask.g.db_conn.execute(query)
    result = json.dumps({'component_file': values})
    return flask.Response(result, 201, content_type='application/json')
def test_purge_failure(admin, components_user_ids, topic_user_id):
    """A failing store deletion must leave the component purgeable."""
    cmpt_id = components_user_ids[0]
    files_url = '/api/v1/components/%s/files' % cmpt_id

    # Attach one file to the component.
    created = admin.post(files_url, data='lol')
    assert created.status_code == 201
    listing = admin.get('/api/v1/components/%s/files' % cmpt_id)
    assert len(listing.data['component_files']) == 1

    # Archive the component so it becomes a purge candidate.
    deleted = admin.delete('/api/v1/components/%s' % cmpt_id)
    assert deleted.status_code == 204
    assert len(admin.get('/api/v1/components/purge').data['components']) == 1

    # Make the store deletion blow up: the purge must answer 400 and keep
    # both the backend object and the database row untouched.
    with mock.patch('dci.stores.filesystem.FileSystem.delete') as mock_delete:
        backend_path = files_utils.build_file_path(
            topic_user_id, cmpt_id, created.data['component_file']['id'])
        mock_delete.side_effect = dci_exc.StoreExceptions('error')
        failed_purge = admin.post('/api/v1/components/purge')
        assert failed_purge.status_code == 400
        # get() raising here would mean the file was removed despite the error
        dci_config.get_store('components').get(backend_path)

    assert len(admin.get('/api/v1/components/purge').data['components']) == 1
def test_purge(user, admin, job_user_id, jobstate_user_id, team_user_id):
    """Purging jobs removes their files from the DB and the backend."""
    job_etag = user.get('/api/v1/jobs/%s' % job_user_id).data['job']['etag']

    # Attach one file to the job, then archive the job.
    file_id1 = t_utils.post_file(user, jobstate_user_id,
                                 FileDesc('kikoolol', 'content'))
    djob = admin.delete('/api/v1/jobs/%s' % job_user_id,
                        headers={'If-match': job_etag})
    assert djob.status_code == 204
    assert len(admin.get('/api/v1/jobs/purge').data['jobs']) == 1
    assert len(admin.get('/api/v1/files/purge').data['files']) == 1

    admin.post('/api/v1/jobs/purge')

    # The purge removed the file from the backend: get() must raise.
    path1 = files_utils.build_file_path(team_user_id, job_user_id, file_id1)
    with pytest.raises(dci_exc.StoreExceptions):
        dci_config.get_store('files').get(path1)
    assert len(admin.get('/api/v1/jobs/purge').data['jobs']) == 0
    assert len(admin.get('/api/v1/files/purge').data['files']) == 0
def upload_component_file(user, c_id):
    """Attach an uploaded file to component *c_id*.

    The caller must belong to a team subscribed to the component's topic.
    The payload is pushed to the 'components' store and its metadata, as
    reported by the store, is recorded in the COMPONENT_FILES table.
    """
    cmpt_files_table = models.COMPONENT_FILES
    component = v1_utils.verify_existence_and_get(c_id, _TABLE)
    if str(component['topic_id']) not in v1_utils.user_topic_ids(user):
        raise dci_exc.Unauthorized()

    store = dci_config.get_store('components')
    new_id = utils.gen_uuid()
    path = files_utils.build_file_path(component['topic_id'], c_id, new_id)
    payload = files_utils.get_stream_or_content_from_request(flask.request)
    store.upload(path, payload)
    stored = store.head(path)

    values = dict.fromkeys(['md5', 'mime', 'component_id', 'name'])
    values.update({
        'id': new_id,
        'component_id': c_id,
        # the generated uuid doubles as the file name
        'name': new_id,
        'created_at': datetime.datetime.utcnow().isoformat(),
        'md5': stored['etag'],
        'mime': stored['content-type'],
        'size': stored['content-length']
    })

    flask.g.db_conn.execute(cmpt_files_table.insert().values(**values))
    body = json.dumps({'component_file': values})
    return flask.Response(body, 201, content_type='application/json')
def get_all_results_from_jobs(user, j_id):
    """Get all results from job. """
    job = v1_utils.verify_existence_and_get(j_id, _TABLE)
    if not user.is_in_team(job['team_id']):
        raise auth.UNAUTHORIZED

    store = dci_config.get_store('files')
    all_files = json.loads(files.get_all_files(j_id).response[0])['files']

    results = []
    for junit_file in all_files:
        # only junit attachments carry test results
        if junit_file['mime'] != 'application/junit':
            continue
        path = store.build_file_path(junit_file['team_id'], j_id,
                                     junit_file['id'])
        _, descriptor = store.get(path)
        parsed = tsfm.junit2dict(descriptor.read())
        entry = {'filename': junit_file['name'],
                 'name': junit_file['name']}
        # NOTE: 'testscases' is the key exposed by the API; keep it as-is.
        for key in ('total', 'failures', 'errors', 'skips', 'time',
                    'success', 'testscases'):
            entry[key] = parsed[key]
        results.append(entry)

    return flask.jsonify({'results': results,
                          '_meta': {'count': len(results)}})
def test_purge(admin, components_user_ids, topic_user_id):
    """A successful purge removes DB rows and store objects alike."""
    cmpt_id = components_user_ids[0]
    store = dci_config.get_store('components')

    # Attach two files; verify each exists in the backend right away.
    backend_paths = []
    for _ in range(2):
        res = admin.post('/api/v1/components/%s/files' % cmpt_id, data='lol')
        assert res.status_code == 201
        path = files_utils.build_file_path(
            topic_user_id, cmpt_id, res.data['component_file']['id'])
        store.get(path)
        backend_paths.append(path)

    admin.delete('/api/v1/components/%s' % cmpt_id)
    assert len(admin.get('/api/v1/components/purge').data['components']) == 1

    purge_res = admin.post('/api/v1/components/purge')
    assert purge_res.status_code == 204
    # After the purge the backend objects are gone: get() must raise.
    for path in backend_paths:
        with pytest.raises(dci_exc.StoreExceptions):
            store.get(path)
    assert len(admin.get('/api/v1/components/purge').data['components']) == 0
def purge_archived_files(user):
    """Delete every archived file from the database and the store.

    Each file is handled inside its own transaction: the SQL row is
    deleted first, then the backing object in the store. A 404 from the
    store means the object is already gone, so the SQL deletion is kept;
    any other store or database error rolls the transaction back and
    aborts the purge.
    """
    # get all archived files
    archived_files = base.get_archived_resources(_TABLE)
    store = dci_config.get_store('files')

    for file in archived_files:
        tx = flask.g.db_conn.begin()
        # Built before the try block: the except clauses log it, and the
        # original code could hit a NameError when the SQL delete raised
        # before file_path was assigned.
        file_path = files_utils.build_file_path(file['team_id'],
                                                file['job_id'],
                                                file['id'])
        try:
            q_delete_file = _TABLE.delete().where(_TABLE.c.id == file['id'])
            flask.g.db_conn.execute(q_delete_file)
            store.delete(file_path)
            tx.commit()
            logger.debug('file %s removed' % file_path)
        except dci_exc.StoreExceptions as e:
            if e.status_code == 404:
                logger.warn('file %s not found in store' % file_path)
                # Fix: the transaction used to be left dangling here, which
                # rolled back the SQL deletion and kept re-listing
                # store-less files on every purge. Keep the row deletion.
                tx.commit()
            else:
                # Fix: roll back before propagating instead of leaving the
                # transaction open.
                tx.rollback()
                raise e
        except sa_exc.DBAPIError as e:
            logger.error('Error while removing file %s, message: %s'
                         % (file_path, str(e)))
            tx.rollback()
            raise dci_exc.DCIException(str(e))

    return flask.Response(None, 204, content_type='application/json')
def test_purge_failure(app, admin, user, jobstate_user_id, job_user_id,
                       team_user_id):
    """If the store deletion fails, archived files must stay purgeable."""
    # Create two files and archive them.
    file_ids = []
    for desc in (FileDesc('kikoolol', 'content'),
                 FileDesc('kikoolol2', 'content2')):
        f_id = t_utils.post_file(user, jobstate_user_id, desc)
        user.delete('/api/v1/files/%s' % f_id)
        file_ids.append(f_id)
    assert len(admin.get('/api/v1/files/purge').data['files']) == 2

    # The purge fails: HTTP 400 and nothing removed from the backend.
    with mock.patch('dci.stores.filesystem.FileSystem.delete') as mock_delete:
        mock_delete.side_effect = dci_exc.StoreExceptions('error')
        assert admin.post('/api/v1/files/purge').status_code == 400
        store = dci_config.get_store('files')
        for f_id in file_ids:
            # get() raising here would mean the file was deleted anyway
            store.get(files_utils.build_file_path(team_user_id,
                                                  job_user_id, f_id))

    assert len(admin.get('/api/v1/files/purge').data['files']) == 2
def test_purge_failure(user, admin, job_user_id, jobstate_user_id,
                       team_user_id):
    """A failing store deletion must abort the job purge entirely."""
    job_etag = user.get('/api/v1/jobs/%s' % job_user_id).data['job']['etag']

    # Attach one file to the job, then archive the job.
    file_id1 = t_utils.post_file(user, jobstate_user_id,
                                 FileDesc('kikoolol', 'content'))
    djob = admin.delete('/api/v1/jobs/%s' % job_user_id,
                        headers={'If-match': job_etag})
    assert djob.status_code == 204
    assert len(admin.get('/api/v1/jobs/purge').data['jobs']) == 1
    assert len(admin.get('/api/v1/files/purge').data['files']) == 1

    # The purge fails because the store deletion raises.
    with mock.patch('dci.stores.filesystem.FileSystem.delete') as mock_delete:
        mock_delete.side_effect = dci_exc.StoreExceptions('error')
        assert admin.post('/api/v1/jobs/purge').status_code == 400
        # Because the delete failed, the backend kept the file and both
        # the file and the job are still listed as purge candidates.
        path1 = files_utils.build_file_path(team_user_id, job_user_id,
                                            file_id1)
        dci_config.get_store('files').get(path1)

    assert len(admin.get('/api/v1/files/purge').data['files']) == 1
    assert len(admin.get('/api/v1/jobs/purge').data['jobs']) == 1
def get_file_descriptor(file_object):
    """Return an open descriptor for *file_object* from the files store."""
    store = dci_config.get_store('files')
    path = files_utils.build_file_path(file_object['team_id'],
                                       file_object['job_id'],
                                       file_object['id'])
    # head() raises if the object is missing from the storage engine
    store.head(path)
    return store.get(path)[1]
def create_files(user):
    """Create a file attached to a job from the current HTTP request.

    File metadata comes from the DCI-* request headers; the payload is
    the request body. The file is uploaded to the 'files' store first,
    then its row is inserted in a transaction. junit payloads are parsed
    and their results recorded as a side effect of the same transaction.

    :raises dci_exc.DCIException: when the mandatory headers are missing
    :raises dci_exc.Unauthorized: when the caller may not write to the job
    """
    file_info = get_file_info_from_headers(dict(flask.request.headers))
    values = dict.fromkeys(
        ['md5', 'mime', 'jobstate_id', 'job_id', 'name', 'test_id'])
    values.update(file_info)

    # A file must be attached to either a jobstate or a job, and be named.
    if values.get('jobstate_id') is None and values.get('job_id') is None:
        raise dci_exc.DCIException('HTTP headers DCI-JOBSTATE-ID or '
                                   'DCI-JOB-ID must be specified')
    if values.get('name') is None:
        raise dci_exc.DCIException('HTTP header DCI-NAME must be specified')

    # When only a jobstate is given, resolve the owning job through it.
    if values.get('jobstate_id') and values.get('job_id') is None:
        jobstate = v1_utils.verify_existence_and_get(values.get('jobstate_id'),
                                                     models.JOBSTATES)
        values['job_id'] = jobstate['job_id']

    job = v1_utils.verify_existence_and_get(values.get('job_id'), models.JOBS)
    # Reject callers that are outside the job's team, read-only and not EPM.
    if (user.is_not_in_team(job['team_id']) and user.is_read_only_user() and
            user.is_not_epm()):
        raise dci_exc.Unauthorized()

    file_id = utils.gen_uuid()
    file_path = files_utils.build_file_path(job['team_id'],
                                            values['job_id'],
                                            file_id)

    # Upload to the store before touching the database; head() returns the
    # metadata (size) the store computed during the upload.
    content = files_utils.get_stream_or_content_from_request(flask.request)
    store = dci_config.get_store('files')
    store.upload(file_path, content)
    s_file = store.head(file_path)
    etag = utils.gen_etag()

    values.update({
        'id': file_id,
        'created_at': datetime.datetime.utcnow().isoformat(),
        'updated_at': datetime.datetime.utcnow().isoformat(),
        'team_id': job['team_id'],
        # md5 deliberately left unset; size comes from the store metadata
        'md5': None,
        'size': s_file['content-length'],
        'state': 'active',
        'etag': etag,
    })

    # Row insertion and junit processing share one transaction so a parse
    # failure also rolls back the file row.
    with flask.g.db_conn.begin():
        q_insert_file = _TABLE.insert().values(**values)
        flask.g.db_conn.execute(q_insert_file)
        result = json.dumps({'file': values})

        if values['mime'] == 'application/junit':
            _, junit_file = store.get(file_path)
            _process_junit_file(values, junit_file, job)

        return flask.Response(result, 201, content_type='application/json')
def download_component_file(user, c_id, f_id):
    """Stream component file *f_id* of component *c_id* to the caller."""
    store = dci_config.get_store('components')
    component = v1_utils.verify_existence_and_get(c_id, _TABLE)
    # caller must belong to the topic, and pass export control
    v1_utils.verify_team_in_topic(user, component['topic_id'])
    v1_utils.verify_existence_and_get(f_id, models.COMPONENT_FILES)
    auth.check_export_control(user, component)
    path = store.build_file_path(component['topic_id'], c_id, f_id)
    # head() raises if the object is gone from the storage engine
    store.head(path)
    return flask.Response(store.get_object(path))
def download_component_file(user, c_id, f_id):
    """Send back a component file's content with its stored mime type."""
    store = dci_config.get_store('components')
    component = v1_utils.verify_existence_and_get(c_id, _TABLE)
    topic = v1_utils.verify_existence_and_get(component['topic_id'],
                                              models.TOPICS)
    # access to the component is gated by access to its topic
    export_control.verify_access_to_topic(user, topic)
    component_file = v1_utils.verify_existence_and_get(
        f_id, models.COMPONENT_FILES)
    path = files_utils.build_file_path(component['topic_id'], c_id, f_id)
    # head() raises if the object is missing on the storage engine
    store.head(path)
    descriptor = store.get(path)[1]
    return flask.send_file(descriptor, mimetype=component_file['mime'])
def get_file_content(user, file_id):
    """Send a file's content as an attachment to an authorized caller."""
    file = v1_utils.verify_existence_and_get(file_id, _TABLE)
    store = dci_config.get_store('files')
    if not user.is_in_team(file['team_id']):
        raise auth.UNAUTHORIZED

    path = store.build_file_path(file['team_id'], file['job_id'], file_id)
    # head() raises if the object is missing on the storage engine
    store.head(path)
    descriptor = store.get(path)[1]
    return flask.send_file(
        descriptor,
        mimetype=file['mime'] or 'text/plain',
        as_attachment=True,
        attachment_filename=file['name'].replace(' ', '_'))
def purge_archived_components(user):
    """Delete archived components and their files from DB and store.

    For each archived component, every component file is removed inside
    its own transaction (SQL row first, then the store object). A 404
    from the store means the object is already gone, so the SQL deletion
    is kept; any other error rolls the transaction back and aborts the
    purge. The component row itself is deleted once its files are gone.
    """
    # get all archived components
    archived_components = base.get_archived_resources(_TABLE)
    store = dci_config.get_store('components')

    for cmpt in archived_components:
        get_cmpt_files = v1_utils.QueryBuilder(models.COMPONENT_FILES)
        get_cmpt_files.add_extra_condition(
            models.COMPONENT_FILES.c.component_id == cmpt['id'])
        cmpt_files = get_cmpt_files.execute(fetchall=True, use_labels=False)

        for cmpt_file in cmpt_files:
            tx = flask.g.db_conn.begin()
            file_path = files_utils.build_file_path(cmpt['topic_id'],
                                                    cmpt['id'],
                                                    cmpt_file['id'])
            try:
                q_delete_cfile = models.COMPONENT_FILES.delete().\
                    where(models.COMPONENT_FILES.c.id == cmpt_file['id'])
                flask.g.db_conn.execute(q_delete_cfile)
                store.delete(file_path)
                tx.commit()
            except dci_exc.StoreExceptions as e:
                if e.status_code == 404:
                    LOG.warn('file %s not found in store' % file_path)
                    # Fix: the transaction used to be left dangling here,
                    # which rolled back the SQL deletion and kept the
                    # orphan row (and its component) forever. Keep it.
                    tx.commit()
                else:
                    # Fix: roll back before propagating instead of leaving
                    # the transaction open.
                    tx.rollback()
                    raise e
            except Exception as e:
                tx.rollback()
                LOG.error(
                    'Error while removing component file %s, message: %s'
                    % (file_path, str(e)))
                raise dci_exc.DCIException(str(e))

        # all files handled: the component row itself can go
        flask.g.db_conn.execute(
            _TABLE.delete().where(_TABLE.c.id == cmpt['id']))

    return flask.Response(None, 204, content_type='application/json')
def delete_component_file(user, c_id, f_id):
    """Remove a component file: DB row first, then the store object."""
    cmpt_files = models.COMPONENT_FILES
    component = v1_utils.verify_existence_and_get(c_id, _TABLE)
    # caller must belong to a team subscribed to the component's topic
    if str(component['topic_id']) not in v1_utils.user_topic_ids(user):
        raise dci_exc.Unauthorized()
    v1_utils.verify_existence_and_get(f_id, cmpt_files)

    deletion = cmpt_files.delete().where(cmpt_files.c.id == f_id)
    if not flask.g.db_conn.execute(deletion).rowcount:
        raise dci_exc.DCIDeleteConflict('Component File', f_id)

    store = dci_config.get_store('components')
    store.delete(files_utils.build_file_path(component['topic_id'], c_id,
                                             f_id))
    return flask.Response(None, 204, content_type='application/json')
def test_purge(app, admin, user, jobstate_user_id, team_user_id, job_user_id):
    """Purged files disappear from the backend and from the purge list."""
    # Create two files and archive them.
    file_ids = []
    for desc in (FileDesc('kikoolol', 'content'),
                 FileDesc('kikoolol2', 'content2')):
        f_id = t_utils.post_file(user, jobstate_user_id, desc)
        user.delete('/api/v1/files/%s' % f_id)
        file_ids.append(f_id)
    assert len(admin.get('/api/v1/files/purge').data['files']) == 2

    admin.post('/api/v1/files/purge')

    # The purge removed both files from the backend: get() must raise.
    store = dci_config.get_store('files')
    for f_id in file_ids:
        path = files_utils.build_file_path(team_user_id, job_user_id, f_id)
        with pytest.raises(dci_exc.StoreExceptions):
            store.get(path)
    assert len(admin.get('/api/v1/files/purge').data['files']) == 0
def delete_component_file(user, c_id, f_id):
    """Delete a component file (admin only): DB row, then store object."""
    if not auth.is_admin(user):
        raise auth.UNAUTHORIZED
    cmpt_files = models.COMPONENT_FILES
    component = v1_utils.verify_existence_and_get(c_id, _TABLE)
    v1_utils.verify_existence_and_get(f_id, cmpt_files)

    deletion = cmpt_files.delete().where(cmpt_files.c.id == f_id)
    if not flask.g.db_conn.execute(deletion).rowcount:
        raise dci_exc.DCIDeleteConflict('Component File', f_id)

    swift = dci_config.get_store('components')
    swift.delete(swift.build_file_path(component['topic_id'], c_id, f_id))
    return flask.Response(None, 204, content_type='application/json')
def get_file_content(user, file_id):
    """Stream a file's raw content back with attachment headers."""
    file = v1_utils.verify_existence_and_get(file_id, _TABLE)
    swift = dci_config.get_store('files')

    def stream_blocks(path):
        # the nested generator keeps the store access lazy: nothing is
        # fetched until Flask starts iterating the response body
        for chunk in swift.get(path)[1]:
            yield chunk

    if not (auth.is_admin(user) or auth.is_in_team(user, file['team_id'])):
        raise auth.UNAUTHORIZED

    file_path = swift.build_file_path(file['team_id'], file['job_id'],
                                      file_id)
    # head() raises if the object is missing on the storage engine
    swift.head(file_path)
    headers = {
        'Content-Length': file['size'],
        'Content-Disposition': 'attachment; filename="%s"'
                               % file['name'].replace(' ', '_')
    }
    return flask.Response(stream_blocks(file_path),
                          content_type=file['mime'] or 'text/plain',
                          headers=headers)
def create_files(user):
    """Create a file attached to a job from the current HTTP request.

    File metadata comes from the DCI-* request headers; the payload is
    the request body. The file is uploaded to the store first, then its
    row is inserted in a transaction that also records parsed junit
    results and emits the FILES_CREATE event.

    :raises dci_exc.DCIException: when the mandatory headers are missing
    :raises dci_exc.DCINotFound: when the jobstate/job is missing or the
        job does not belong to the caller's team (non-admins)
    """
    file_info = get_file_info_from_headers(dict(flask.request.headers))
    swift = dci_config.get_store('files')
    values = dict.fromkeys(
        ['md5', 'mime', 'jobstate_id', 'job_id', 'name', 'test_id'])
    values.update(file_info)

    # A file must be attached to either a jobstate or a job, and be named.
    if values.get('jobstate_id') is None and values.get('job_id') is None:
        raise dci_exc.DCIException('HTTP headers DCI-JOBSTATE-ID or '
                                   'DCI-JOB-ID must be specified')
    if values.get('name') is None:
        raise dci_exc.DCIException('HTTP header DCI-NAME must be specified')

    # When a jobstate is given, resolve the owning job through it.
    if values['jobstate_id']:
        query = v1_utils.QueryBuilder(models.JOBSTATES)
        query.add_extra_condition(
            models.JOBSTATES.c.id == values['jobstate_id'])
        row = query.execute(fetchone=True)
        if row is None:
            raise dci_exc.DCINotFound('Jobstate', values['jobstate_id'])
        values['job_id'] = row['jobstates_job_id']

    # Non-admins may only attach files to their own team's jobs; the team
    # filter makes a foreign job indistinguishable from a missing one.
    query = v1_utils.QueryBuilder(models.JOBS)
    if not auth.is_admin(user):
        query.add_extra_condition(models.JOBS.c.team_id == user['team_id'])
    query.add_extra_condition(models.JOBS.c.id == values['job_id'])
    row = query.execute(fetchone=True)
    if row is None:
        raise dci_exc.DCINotFound('Job', values['job_id'])

    file_id = utils.gen_uuid()
    # ensure the directory which will contains the file actually exist
    file_path = swift.build_file_path(user['team_id'],
                                      values['job_id'],
                                      file_id)

    # Upload to the store before touching the database; head() returns the
    # metadata (size) the store computed during the upload.
    content = files.get_stream_or_content_from_request(flask.request)
    swift.upload(file_path, content)
    s_file = swift.head(file_path)
    etag = utils.gen_etag()

    values.update({
        'id': file_id,
        'created_at': datetime.datetime.utcnow().isoformat(),
        'updated_at': datetime.datetime.utcnow().isoformat(),
        'team_id': user['team_id'],
        # md5 deliberately left unset; size comes from the store metadata
        'md5': None,
        'size': s_file['content-length'],
        'state': 'active',
        'etag': etag,
    })
    query = _TABLE.insert().values(**values)

    # Row insertion, junit results and the creation event share one
    # transaction so a failure rolls everything back together.
    with flask.g.db_conn.begin():
        flask.g.db_conn.execute(query)
        result = json.dumps({'file': values})

        if values['mime'] == 'application/junit':
            _, file_descriptor = swift.get(file_path)
            junit = tsfm.junit2dict(file_descriptor.read())
            query = models.TESTS_RESULTS.insert().values({
                'id': utils.gen_uuid(),
                'created_at': values['created_at'],
                'updated_at': datetime.datetime.utcnow().isoformat(),
                'file_id': file_id,
                'job_id': values['job_id'],
                'name': values['name'],
                'success': junit['success'],
                'failures': junit['failures'],
                'errors': junit['errors'],
                'skips': junit['skips'],
                'total': junit['total'],
                'time': junit['time']
            })
            flask.g.db_conn.execute(query)

        files_events.create_event(file_id, models.FILES_CREATE)
        return flask.Response(result, 201, content_type='application/json')
def init_db(db_conn, minimal, file): """Initialize the database with fake datas Create an admin team and 2 other teams HP and DELL Create 3 topics, 1 common and 2 scoped, 1 for each team """ db_ins = functools.partial(db_insert, db_conn) time = time_helper() # Create a super admin team_admin = db_ins(models.TEAMS, name='admin') # Create the three mandatory roles super_admin_role = { 'name': 'Super Admin', 'label': 'SUPER_ADMIN', 'description': 'Admin of the platform', } admin_role = { 'name': 'Admin', 'label': 'ADMIN', 'description': 'Admin of a team', } user_role = { 'name': 'User', 'label': 'USER', 'description': 'Regular User', } admin_role_id = db_ins(models.ROLES, **admin_role) user_role_id = db_ins(models.ROLES, **user_role) super_admin_role_id = db_ins(models.ROLES, **super_admin_role) db_ins(models.USERS, name='admin', role_id=super_admin_role_id, team_id=team_admin, password=auth.hash_password('admin')) if minimal: return # Create two other teams team_hp = db_ins(models.TEAMS, name='hp') team_dell = db_ins(models.TEAMS, name='dell') # Creates according users, 1 admin 1 user for other teams db_ins(models.USERS, name='user_hp', role_id=user_role_id, team_id=team_hp, password=auth.hash_password('password')) db_ins(models.USERS, name='admin_hp', role_id=admin_role_id, team_id=team_hp, password=auth.hash_password('password')) db_ins(models.USERS, name='user_dell', role_id=user_role_id, team_id=team_dell, password=auth.hash_password('password')) db_ins(models.USERS, name='admin_dell', role_id=admin_role_id, team_id=team_dell, password=auth.hash_password('password')) # Create 3 topics, 1 common and 2 scoped topic_common = db_ins(models.TOPICS, name='topic_common') topic_hp = db_ins(models.TOPICS, name='topic_HP') topic_dell = db_ins(models.TOPICS, name='topic_DELL') # Attach teams to topics db_ins(models.JOINS_TOPICS_TEAMS, topic_id=topic_common, team_id=team_admin) db_ins(models.JOINS_TOPICS_TEAMS, topic_id=topic_hp, team_id=team_admin) 
db_ins(models.JOINS_TOPICS_TEAMS, topic_id=topic_dell, team_id=team_admin) db_ins(models.JOINS_TOPICS_TEAMS, topic_id=topic_common, team_id=team_hp) db_ins(models.JOINS_TOPICS_TEAMS, topic_id=topic_hp, team_id=team_hp) db_ins(models.JOINS_TOPICS_TEAMS, topic_id=topic_common, team_id=team_dell) db_ins(models.JOINS_TOPICS_TEAMS, topic_id=topic_dell, team_id=team_dell) # Create 2 remotecis per team remoteci_hp_1 = { 'name': 'HP_1', 'team_id': team_hp, 'data': { 'storage': 'netapp', 'network': 'HP', 'hardware': 'Intel', 'virtualization': 'KVM' } } remoteci_hp_1 = db_ins(models.REMOTECIS, **remoteci_hp_1) remoteci_hp_2 = { 'name': 'HP_2', 'team_id': team_hp, 'data': { 'storage': 'ceph', 'network': 'Cisco', 'hardware': 'HP', 'virtualization': 'VMWare' } } remoteci_hp_2 = db_ins(models.REMOTECIS, **remoteci_hp_2) remoteci_dell_1 = { 'name': 'Dell_1', 'team_id': team_dell, 'data': { 'storage': 'swift', 'network': 'Juniper', 'hardware': 'Dell', 'virtualization': 'Xen' } } remoteci_dell_1 = db_ins(models.REMOTECIS, **remoteci_dell_1) remoteci_dell_2 = { 'name': 'Dell_2', 'team_id': team_dell, 'data': { 'storage': 'AWS', 'network': 'Brocade', 'hardware': 'Huawei', 'virtualization': 'HyperV' } } remoteci_dell_2 = db_ins(models.REMOTECIS, **remoteci_dell_2) # Create 2 components per topic component_common_1 = db_ins(models.COMPONENTS, topic_id=topic_common, type='git', name='Khaleesi', created_at=time[3][15]) component_common_2 = db_ins(models.COMPONENTS, topic_id=topic_common, type='image', name='RDO Manager', created_at=time[2][20]) component_hp_1 = db_ins(models.COMPONENTS, topic_id=topic_hp, type='package', name='OSP director', created_at=time[3][5]) component_hp_2 = db_ins(models.COMPONENTS, topic_id=topic_hp, type='gerrit_review', name='DCI-control-server', created_at=time[2][2]) component_dell_1 = db_ins(models.COMPONENTS, topic_id=topic_dell, type='git', name='Khaleesi', created_at=time[2][21]) component_dell_2 = db_ins(models.COMPONENTS, topic_id=topic_dell, 
type='package', name='OSP director', created_at=time[3][12]) # Create 2 jobdefinitions per topic jobdef_common_1 = db_ins(models.JOBDEFINITIONS, topic_id=topic_common, name='Common tox v0.8', component_types=['git']) jobdef_common_2 = db_ins(models.JOBDEFINITIONS, topic_id=topic_common, name='Common tox v2.1.1', component_types=['git']) jobdef_hp_1 = db_ins(models.JOBDEFINITIONS, topic_id=topic_hp, name='HP tempest v0.4.2', component_types=['OSP director']) jobdef_hp_2 = db_ins(models.JOBDEFINITIONS, topic_id=topic_hp, name='HP tempest v1.1', component_types=['package', 'gerrit_review']) jobdef_dell_1 = db_ins(models.JOBDEFINITIONS, topic_id=topic_dell, name='Dell khaleesi-tempest v0.8', component_types=['git']) jobdef_dell_2 = db_ins(models.JOBDEFINITIONS, topic_id=topic_dell, name='Dell khaleesi-tempest v1.2.15', component_types=['git', 'package']) # Creates 3 tests type test_common = db_ins(models.TESTS, name='tox', team_id=team_admin) test_hp = db_ins(models.TESTS, name='tempest', team_id=team_hp) test_dell = db_ins(models.TESTS, name='khaleesi-tempest', team_id=team_dell) db_ins(models.JOIN_JOBDEFINITIONS_TESTS, jobdefinition_id=jobdef_common_1, test_id=test_common) db_ins(models.JOIN_JOBDEFINITIONS_TESTS, jobdefinition_id=jobdef_common_2, test_id=test_common) db_ins(models.JOIN_JOBDEFINITIONS_TESTS, jobdefinition_id=jobdef_hp_1, test_id=test_hp) db_ins(models.JOIN_JOBDEFINITIONS_TESTS, jobdefinition_id=jobdef_hp_2, test_id=test_hp) db_ins(models.JOIN_JOBDEFINITIONS_TESTS, jobdefinition_id=jobdef_dell_1, test_id=test_dell) db_ins(models.JOIN_JOBDEFINITIONS_TESTS, jobdefinition_id=jobdef_dell_2, test_id=test_dell) # Creates 4 jobs for each jobdefinition (4*6=24 in total for pagination) job_id = db_ins(models.JOBS, status='new', jobdefinition_id=jobdef_common_1, remoteci_id=remoteci_hp_1, team_id=team_hp, created_at=time[0][1], updated_at=time[0][1], user_agent='python-dciclient_0.1.0') db_ins(models.JOIN_JOBS_COMPONENTS, job_id=job_id, 
component_id=component_common_1) job_id = db_ins(models.JOBS, status='new', jobdefinition_id=jobdef_common_1, remoteci_id=remoteci_hp_1, team_id=team_hp, created_at=time[0][2], updated_at=time[0][2], user_agent='python-dciclient_0.1.0') db_ins(models.JOIN_JOBS_COMPONENTS, job_id=job_id, component_id=component_common_1) job_id = db_ins(models.JOBS, status='pre-run', jobdefinition_id=jobdef_common_1, remoteci_id=remoteci_hp_1, team_id=team_hp, created_at=time[0][2], updated_at=time[0][1], user_agent='python-dciclient_0.1.0') db_ins(models.JOIN_JOBS_COMPONENTS, job_id=job_id, component_id=component_common_1) job_id = db_ins(models.JOBS, status='pre-run', jobdefinition_id=jobdef_common_2, remoteci_id=remoteci_hp_1, team_id=team_hp, created_at=time[0][3], updated_at=time[0][1], user_agent='python-dciclient_0.1.0') db_ins(models.JOIN_JOBS_COMPONENTS, job_id=job_id, component_id=component_common_2) job_id = db_ins(models.JOBS, status='running', jobdefinition_id=jobdef_common_2, remoteci_id=remoteci_hp_1, team_id=team_hp, created_at=time[0][10], updated_at=time[0][3], user_agent='python-dciclient_0.1.0') db_ins(models.JOIN_JOBS_COMPONENTS, job_id=job_id, component_id=component_common_2) job_id = db_ins(models.JOBS, status='running', jobdefinition_id=jobdef_common_2, remoteci_id=remoteci_hp_1, team_id=team_hp, created_at=time[0][14], updated_at=time[0][7], user_agent='python-dciclient_0.1.0') db_ins(models.JOIN_JOBS_COMPONENTS, job_id=job_id, component_id=component_common_2) job_id = db_ins(models.JOBS, status='post-run', jobdefinition_id=jobdef_hp_1, remoteci_id=remoteci_hp_2, team_id=team_hp, created_at=time[1][0], updated_at=time[0][10], user_agent='python-dciclient_0.1.0') db_ins(models.JOIN_JOBS_COMPONENTS, job_id=job_id, component_id=component_hp_1) job_id = db_ins(models.JOBS, status='post-run', jobdefinition_id=jobdef_hp_1, remoteci_id=remoteci_hp_2, team_id=team_hp, created_at=time[0][20], updated_at=time[0][2], user_agent='python-dciclient_0.1.0') 
db_ins(models.JOIN_JOBS_COMPONENTS, job_id=job_id, component_id=component_hp_1) job_id = db_ins(models.JOBS, status='failure', jobdefinition_id=jobdef_hp_1, remoteci_id=remoteci_hp_2, team_id=team_hp, created_at=time[2][10], updated_at=time[1][3], user_agent='python-dciclient_0.1.0') db_ins(models.JOIN_JOBS_COMPONENTS, job_id=job_id, component_id=component_hp_1) job_id = db_ins(models.JOBS, status='failure', jobdefinition_id=jobdef_hp_2, remoteci_id=remoteci_hp_2, team_id=team_hp, created_at=time[1][1], updated_at=time[0][0], user_agent='python-dciclient_0.1.0') db_ins(models.JOIN_JOBS_COMPONENTS, job_id=job_id, component_id=component_hp_2) job_id = db_ins(models.JOBS, status='success', jobdefinition_id=jobdef_hp_2, remoteci_id=remoteci_hp_2, team_id=team_hp, created_at=time[3][12], updated_at=time[2][20], user_agent='python-dciclient_0.1.0') db_ins(models.JOIN_JOBS_COMPONENTS, job_id=job_id, component_id=component_hp_2) job_id = db_ins(models.JOBS, status='success', jobdefinition_id=jobdef_hp_2, remoteci_id=remoteci_hp_2, team_id=team_hp, created_at=time[3][20], updated_at=time[0][6], user_agent='python-dciclient_0.1.0') db_ins(models.JOIN_JOBS_COMPONENTS, job_id=job_id, component_id=component_hp_2) job_id = db_ins(models.JOBS, status='killed', jobdefinition_id=jobdef_hp_2, remoteci_id=remoteci_hp_1, team_id=team_hp, created_at=time[1][8], updated_at=time[0][1], user_agent='python-dciclient_0.1.0') db_ins(models.JOIN_JOBS_COMPONENTS, job_id=job_id, component_id=component_hp_2) job_id = db_ins(models.JOBS, status='killed', jobdefinition_id=jobdef_hp_2, remoteci_id=remoteci_hp_2, team_id=team_hp, created_at=time[2][12], updated_at=time[1][6], user_agent='python-dciclient_0.1.0') db_ins(models.JOIN_JOBS_COMPONENTS, job_id=job_id, component_id=component_hp_2) job_id = db_ins(models.JOBS, status='new', jobdefinition_id=jobdef_common_1, remoteci_id=remoteci_dell_1, team_id=team_dell, created_at=time[0][1], updated_at=time[0][1], user_agent='python-dciclient_0.1.0') 
db_ins(models.JOIN_JOBS_COMPONENTS, job_id=job_id, component_id=component_common_1) job_id = db_ins(models.JOBS, status='new', jobdefinition_id=jobdef_common_1, remoteci_id=remoteci_dell_1, team_id=team_dell, created_at=time[0][2], updated_at=time[0][2], user_agent='python-dciclient_0.1.0') db_ins(models.JOIN_JOBS_COMPONENTS, job_id=job_id, component_id=component_common_1) job_id = db_ins(models.JOBS, status='pre-run', jobdefinition_id=jobdef_common_1, remoteci_id=remoteci_dell_1, team_id=team_dell, created_at=time[0][2], updated_at=time[0][1], user_agent='python-dciclient_0.1.0') db_ins(models.JOIN_JOBS_COMPONENTS, job_id=job_id, component_id=component_common_1) job_id = db_ins(models.JOBS, status='pre-run', jobdefinition_id=jobdef_common_2, remoteci_id=remoteci_dell_1, team_id=team_dell, created_at=time[0][3], updated_at=time[0][1], user_agent='python-dciclient_0.1.0') db_ins(models.JOIN_JOBS_COMPONENTS, job_id=job_id, component_id=component_common_2) job_id = db_ins(models.JOBS, status='running', jobdefinition_id=jobdef_common_2, remoteci_id=remoteci_dell_1, team_id=team_dell, created_at=time[0][10], updated_at=time[0][3], user_agent='python-dciclient_0.1.0') db_ins(models.JOIN_JOBS_COMPONENTS, job_id=job_id, component_id=component_common_2) job_id = db_ins(models.JOBS, status='running', jobdefinition_id=jobdef_common_2, remoteci_id=remoteci_dell_1, team_id=team_dell, created_at=time[0][14], updated_at=time[0][7], user_agent='python-dciclient_0.1.0') db_ins(models.JOIN_JOBS_COMPONENTS, job_id=job_id, component_id=component_common_2) job_id = db_ins(models.JOBS, status='post-run', jobdefinition_id=jobdef_dell_1, remoteci_id=remoteci_dell_2, team_id=team_dell, created_at=time[1][0], updated_at=time[0][10], user_agent='python-dciclient_0.1.0') db_ins(models.JOIN_JOBS_COMPONENTS, job_id=job_id, component_id=component_dell_1) job_id = db_ins(models.JOBS, status='post-run', jobdefinition_id=jobdef_dell_1, remoteci_id=remoteci_dell_2, team_id=team_dell, 
created_at=time[0][20], updated_at=time[0][2], user_agent='python-dciclient_0.1.0') db_ins(models.JOIN_JOBS_COMPONENTS, job_id=job_id, component_id=component_dell_1) job_dell_9 = db_ins(models.JOBS, status='failure', jobdefinition_id=jobdef_dell_1, remoteci_id=remoteci_dell_2, team_id=team_dell, created_at=time[2][10], updated_at=time[1][3], user_agent='python-dciclient_0.1.0') db_ins(models.JOIN_JOBS_COMPONENTS, job_id=job_dell_9, component_id=component_dell_1) job_dell_10 = db_ins(models.JOBS, status='failure', jobdefinition_id=jobdef_dell_2, remoteci_id=remoteci_dell_2, team_id=team_dell, created_at=time[1][1], updated_at=time[0][0], user_agent='python-dciclient_0.1.0') db_ins(models.JOIN_JOBS_COMPONENTS, job_id=job_dell_10, component_id=component_dell_2) job_dell_11 = db_ins(models.JOBS, status='success', jobdefinition_id=jobdef_dell_2, remoteci_id=remoteci_dell_2, team_id=team_dell, created_at=time[3][12], updated_at=time[2][20], user_agent='python-dciclient_0.1.0') db_ins(models.JOIN_JOBS_COMPONENTS, job_id=job_dell_11, component_id=component_dell_2) job_dell_12 = db_ins(models.JOBS, status='success', jobdefinition_id=jobdef_dell_2, remoteci_id=remoteci_dell_2, team_id=team_dell, created_at=time[3][20], updated_at=time[0][0], configuration=STACK_DETAILS, user_agent='python-dciclient_0.1.0') db_ins(models.JOIN_JOBS_COMPONENTS, job_id=job_dell_12, component_id=component_dell_2) job_id = db_ins(models.JOBS, status='killed', jobdefinition_id=jobdef_dell_2, remoteci_id=remoteci_dell_1, team_id=team_dell, created_at=time[1][4], updated_at=time[0][3], user_agent='python-dciclient_0.1.0') db_ins(models.JOIN_JOBS_COMPONENTS, job_id=job_id, component_id=component_dell_2) job_id = db_ins(models.JOBS, status='killed', jobdefinition_id=jobdef_dell_2, remoteci_id=remoteci_dell_2, team_id=team_dell, created_at=time[2][8], updated_at=time[1][2], user_agent='python-dciclient_0.1.0') db_ins(models.JOIN_JOBS_COMPONENTS, job_id=job_id, component_id=component_dell_2) # Creates 
jobstates attached to jobs, just create a subset of them to # avoid explosion of complexity # DELL Job 9 db_ins(models.JOBSTATES, status='new', team_id=team_dell, created_at=time[2][10], job_id=job_dell_9) db_ins(models.JOBSTATES, status='pre-run', team_id=team_dell, created_at=time[2][1], job_id=job_dell_9) db_ins(models.JOBSTATES, status='running', team_id=team_dell, created_at=time[2][0], job_id=job_dell_9) db_ins(models.JOBSTATES, status='post-run', team_id=team_dell, created_at=time[1][5], job_id=job_dell_9) db_ins(models.JOBSTATES, status='failure', team_id=team_dell, created_at=time[1][3], job_id=job_dell_9) # DELL Job 10 db_ins(models.JOBSTATES, status='new', team_id=team_dell, created_at=time[1][1], job_id=job_dell_10) db_ins(models.JOBSTATES, status='pre-run', team_id=team_dell, created_at=time[1][0], job_id=job_dell_10) db_ins(models.JOBSTATES, status='running', team_id=team_dell, created_at=time[0][23], job_id=job_dell_10) db_ins(models.JOBSTATES, status='running', team_id=team_dell, created_at=time[0][15], job_id=job_dell_10) db_ins(models.JOBSTATES, status='running', team_id=team_dell, created_at=time[0][11], job_id=job_dell_10) db_ins(models.JOBSTATES, status='post-run', team_id=team_dell, created_at=time[0][2], job_id=job_dell_10) db_ins(models.JOBSTATES, status='post-run', team_id=team_dell, created_at=time[0][1], job_id=job_dell_10) db_ins(models.JOBSTATES, status='failure', team_id=team_dell, created_at=time[0][0], job_id=job_dell_10) # Dell Job 11 db_ins(models.JOBSTATES, status='new', team_id=team_dell, created_at=time[3][12], job_id=job_dell_11) db_ins(models.JOBSTATES, status='pre-run', team_id=team_dell, created_at=time[3][11], job_id=job_dell_11) db_ins(models.JOBSTATES, status='pre-run', team_id=team_dell, created_at=time[3][10], job_id=job_dell_11) db_ins(models.JOBSTATES, status='running', team_id=team_dell, created_at=time[3][1], job_id=job_dell_11) db_ins(models.JOBSTATES, status='post-run', team_id=team_dell, created_at=time[2][22], 
job_id=job_dell_11) db_ins(models.JOBSTATES, status='success', team_id=team_dell, created_at=time[2][20], job_id=job_dell_11) # DELL Job 12 db_ins(models.JOBSTATES, status='new', team_id=team_dell, created_at=time[3][20], job_id=job_dell_12) db_ins(models.JOBSTATES, status='pre-run', team_id=team_dell, created_at=time[3][15], job_id=job_dell_12) db_ins(models.JOBSTATES, status='running', team_id=team_dell, created_at=time[3][14], job_id=job_dell_12) db_ins(models.JOBSTATES, status='running', team_id=team_dell, created_at=time[3][2], job_id=job_dell_12) db_ins(models.JOBSTATES, status='running', team_id=team_dell, created_at=time[2][18], job_id=job_dell_12) db_ins(models.JOBSTATES, status='running', team_id=team_dell, created_at=time[1][5], job_id=job_dell_12) db_ins(models.JOBSTATES, status='running', team_id=team_dell, created_at=time[1][0], job_id=job_dell_12) db_ins(models.JOBSTATES, status='running', team_id=team_dell, created_at=time[0][22], job_id=job_dell_12) db_ins(models.JOBSTATES, status='running', team_id=team_dell, created_at=time[0][13], job_id=job_dell_12) db_ins(models.JOBSTATES, status='post-run', team_id=team_dell, created_at=time[0][12], job_id=job_dell_12) db_ins(models.JOBSTATES, status='post-run', team_id=team_dell, created_at=time[0][10], job_id=job_dell_12) job_dell_12_12 = db_ins(models.JOBSTATES, status='success', team_id=team_dell, created_at=time[0][0], job_id=job_dell_12) # create files only for the last job i.e: dell_12 f_id = db_ins(models.FILES, name='', mime='application/junit', created_at=time[0][0], team_id=team_dell, job_id=job_dell_12) swift = dci_config.get_store('files') if file: file_path = swift.build_file_path(team_dell, job_dell_12, f_id) swift.upload(file_path, JUNIT_TEMPEST) f_id2 = db_ins(models.FILES, name='Rally test suite', mime='application/junit', created_at=time[0][0], team_id=team_dell, job_id=job_dell_12) if file: file_path = swift.build_file_path(team_dell, job_dell_12, f_id2) swift.upload(file_path, 
JUNIT_RALLY) f_id = db_ins(models.FILES, name='foo.txt', mime='text/play', created_at=time[0][0], team_id=team_dell, jobstate_id=job_dell_12_12) if file: file_path = swift.build_file_path(team_dell, job_dell_12, f_id) swift.upload(file_path, 'some content') f_id = db_ins(models.FILES, name='bar.txt', mime='text/play', created_at=time[0][0], team_id=team_dell, jobstate_id=job_dell_12_12) if file: file_path = swift.build_file_path(team_dell, job_dell_12, f_id) swift.upload(file_path, 'some other content')
#!/usr/bin/env python import os import io import tqdm from dci import dci_config from dci.db import models from sqlalchemy import sql conf = dci_config.CONFIG swift = dci_config.get_store('files') engine = dci_config.get_engine(conf).connect() _TABLE = models.FILES # Calculate the total files to sync file_list = os.walk(conf['FILES_UPLOAD_FOLDER']) with tqdm.tqdm(total=sum(1 for _ in file_list)) as pbar: for dirname, dirnames, filenames in os.walk(conf['FILES_UPLOAD_FOLDER']): if not filenames: pbar.update(1) continue for filename in filenames: # Check if file exist in the DB query = sql.select([_TABLE]).where(_TABLE.c.id == filename) result = engine.execute(query) # If not, do not sync, that's an orphan file if result.rowcount == 0: tqdm.tqdm.write("File %s not found, do not sync" % filename)