def start_pipeline_instances_via_pipeline_id(self, pipeline_id, json_data=None):
    """Start a new pipeline instance for the given pipeline id.

    Thin wrapper around ``create_pipeline_instance`` that opens and supplies
    its own database session.
    """
    for db_session in get_db_session():
        return self.create_pipeline_instance(
            pipeline_id, json_data, in_session=db_session)
def get_pipeline_instance_by_id(self, pipeline_instance_id, session=None):
    """Fetch a PipelineInstance by primary key.

    With a caller-supplied ``session`` the ORM object itself is returned;
    otherwise a session is opened here and the serialized form is returned.
    """
    if session is not None:
        return session.query(PipelineInstance).get(pipeline_instance_id)
    for own_session in get_db_session():
        instance = own_session.query(PipelineInstance).get(pipeline_instance_id)
        return instance.serialize()
def analyze_tests(self, pipeline_instance_id, json):
    """
    Analyze test results
    :param pipeline_instance_id:
    :type pipeline_instance_id: int
    :param json: payload describing areas, features and behavior points
    :type json: dict
    :return: area mapping produced by ``_get_area_mapper``, or {}
    :rtype: dict
    """
    if not json:
        return {}
    for session in get_db_session():
        # Resolve the QA product that owns this pipeline instance through
        # its VCS record.
        product = session.query(QaProduct) \
            .join(Vcs) \
            .join(PipelineInstance,
                  PipelineInstance.pipeline_id == Vcs.pipeline_id) \
            .filter(PipelineInstance.id == pipeline_instance_id).first()
        if product is not None:
            # Areas / features / behavior points are handled by the mapper.
            return self._get_area_mapper(session, product.id, json)
    return {}
def get_workable_work_requests(self):
    """Collect work requests that are ready or stalled and can be worked."""
    collected = []
    for session in get_db_session():
        # The dict is shared between both collectors to de-duplicate requests.
        request_map = {}
        self._get_ready_work_requests(session, request_map, collected)
        self._get_stalled_work_requests(session, request_map, collected)
        return collected
def get_status_by_name(self, name, session=None):
    """Look up a Status row by case-insensitive name.

    Returns the ORM row (or None) when a session is supplied, otherwise the
    serialized form (or None).
    """
    lowered = name.lower()
    if session is not None:
        return session.query(Status).filter(
            func.lower(Status.name) == lowered).first()
    for own_session in get_db_session():
        status = own_session.query(Status).filter(
            func.lower(Status.name) == lowered).first()
        if status is None:
            return None
        return status.serialize()
def delete_object(self, endpoint, _id):
    """Delete the entity identified by ``endpoint``/``_id``.

    Returns the deleted entity serialized as JSON, or a 404 response when
    the endpoint is unknown.
    """
    if self._is_valid(endpoint):
        clazz = self.class_map[endpoint]
        for session in get_db_session():
            dal = self._retrieve_dal(clazz)
            deleted = dal.delete_object(session, clazz, _id)
            return Response(json.dumps(deleted.serialize()),
                            content_type='application/json')
    return Response(status=404)
def get_by_identifier(self, commit_identifier):
    """Fetch a Commit by its identifier and return it serialized.

    :param commit_identifier: VCS identifier of the commit
    :return: serialized commit, or None when no commit matches
    """
    for session in get_db_session():
        commit = session.query(Commit).filter(
            Commit.commit_identifier == commit_identifier).first()
        # Bug fix: the serializer must be *called* — the previous code
        # returned the bound method object (``commit.serialize``) and used
        # AttributeError on None to fall through. Guard explicitly instead.
        if commit is not None:
            return commit.serialize()
    return None
def get_step_by_release_and_step_id(self, release_id, step_id, in_session=None):
    """Fetch the Step of a release by its custom step id.

    With ``in_session`` the ORM row (or None) is returned; otherwise a new
    session is opened and the serialized form (or None) is returned.
    """
    if in_session is not None:
        return in_session.query(Step).filter(Step.release_id == release_id)\
            .filter(Step.custom_id == step_id).first()
    for session in get_db_session():
        step = session.query(Step).filter(Step.release_id == release_id) \
            .filter(Step.custom_id == step_id).first()
        # Robustness fix: .first() can return None; previously this raised
        # AttributeError when no step matched (siblings guard for None).
        return step.serialize() if step is not None else None
def get_vcs_by_pipeline_id(self, pipeline_id, in_session=None):
    """Fetch the Vcs row linked to a pipeline, serialized, or None."""
    def _serialized_lookup(session):
        # Shared lookup used by both the caller-session and own-session paths.
        record = session.query(Vcs).filter(
            Vcs.pipeline_id == pipeline_id).first()
        return None if record is None else record.serialize()

    if in_session:
        return _serialized_lookup(in_session)
    for own_session in get_db_session():
        return _serialized_lookup(own_session)
def get_release_by_commit_identifier(self, commit_identifier, in_session=None):
    """Fetch the Release associated with a commit identifier.

    Without ``in_session`` a serialized release (or None) is returned; with
    a session the ORM Release (or None) is returned.
    """
    if in_session is None:
        for session in get_db_session():
            release = session.query(Release).filter(Release.commit_id == Commit.id)\
                .filter(Commit.commit_identifier == commit_identifier).first()
            return release.serialize() if release is not None else None
    # Bug fix: this branch previously compared Release.commit_id (a Commit
    # foreign key) directly to the commit *identifier* string; join through
    # Commit exactly like the branch above does.
    return in_session.query(Release).filter(Release.commit_id == Commit.id)\
        .filter(Commit.commit_identifier == commit_identifier).first()
def get_workable_work_requests(self):
    """Collect ready and stalled work requests, honoring the app config.

    When the AppConfiguration row exists and has ``process_queue`` disabled,
    an empty list is returned.
    """
    results = []
    for session in get_db_session():
        app_configuration = session.query(AppConfiguration).get(1)
        # A missing configuration row is treated as "processing enabled".
        if not app_configuration or app_configuration.process_queue:
            tracker = {}
            self._get_ready_work_requests(session, tracker, results)
            self._get_stalled_work_requests(session, tracker, results)
        return results
def list_single_object(self, endpoint, _id):
    """Serve one entity as JSON.

    Returns 404 for an unknown endpoint. ``query.one()`` raises when the
    id does not match exactly one row.
    """
    if self._is_valid(endpoint):
        for session in get_db_session():
            clazz = self.class_map[endpoint]
            allowed_children = self._get_additional_fields(clazz)
            record = session.query(clazz).filter(clazz.id == _id).one()
            body = json.dumps(
                record.serialize(allowed_children=allowed_children))
            return Response(body, content_type='application/json')
    return Response("Not Valid", status=404)
def get_pipeline_events_by_pipeline_id(self, pipeline_id, session=None):
    """Fetch all PipelineEvents for a pipeline.

    Returns ORM rows when ``session`` is supplied, serialized dicts
    otherwise.
    """
    if session is not None:
        return session.query(PipelineEvent).filter(
            PipelineEvent.pipeline_id == pipeline_id).all()
    for own_session in get_db_session():
        events = own_session.query(PipelineEvent).filter(
            PipelineEvent.pipeline_id == pipeline_id).all()
        return [event.serialize() for event in events]
def create_pipeline_instance(self, pipeline_id, json_data=None, vcs_id=None,
                             in_session=None):
    """Create a pipeline instance, opening a session only when none is given.

    Delegates the actual work to ``_process_pipeline``.
    """
    if in_session:
        return self._process_pipeline(pipeline_id, vcs_id, json_data,
                                      in_session)
    for db_session in get_db_session():
        return self._process_pipeline(pipeline_id, vcs_id, json_data,
                                      db_session)
def get_verify_working(self, time_difference):
    """Serialize non-manual action instances that look stuck in progress.

    An instance qualifies when both it and its pipeline instance are
    INPROGRESS, it is assigned to a worker, it started more than
    ``time_difference`` minutes ago and it has no end date yet.
    """
    # Compute the staleness cutoff once, outside the query chain.
    cutoff = datetime.datetime.utcnow() - datetime.timedelta(
        minutes=time_difference)
    stuck = []
    for session in get_db_session():
        query = session.query(ActionInstance).join(PipelineInstance) \
            .filter(PipelineInstance.status_id == StatusConstants.INPROGRESS) \
            .filter(ActionInstance.status_id == StatusConstants.INPROGRESS) \
            .filter(ActionInstance.assigned_to != '') \
            .filter(ActionInstance.start_date <= cutoff) \
            .filter(ActionInstance.end_date.is_(None)) \
            .filter(ActionInstance.manual == 0)
        stuck.extend(instance.serialize() for instance in query.all())
        return stuck
def reconcile_releases(self):
    """Mark releases SUCCESS once none of their steps is below SUCCESS.

    Returns the serialized releases that were updated.
    """
    reconciled = []
    for session in get_db_session():
        pending = session.query(Release).join(Step).filter(
            and_(
                Release.status_id < StatusConstants.SUCCESS,
                ~exists().where(
                    Step.status_id < StatusConstants.SUCCESS))).all()
        for release in pending:
            release.status_id = StatusConstants.SUCCESS
            reconciled.append(release.serialize())
        session.commit()
        return reconciled
def get_action_instance_by_id(self, _id, session=None):
    """
    Get ActionInstance by ID
    :param _id: long ID
    :param session: Possible db session to use
    :return: Action Instance (ORM row with a session, serialized without)
    :rtype ActionInstance
    """
    if session is not None:
        return session.query(ActionInstance).get(_id)
    for own_session in get_db_session():
        instance = own_session.query(ActionInstance).get(_id)
        return instance.serialize()
def complete_action_instance(self, _id, post_data):
    """Finalize an action instance: stamp end date, persist stats and status.

    The store service is flagged while completion is in flight so other
    workers can see the instance as completing.
    """
    for session in get_db_session():
        instance = self.get_action_instance_by_id(_id, session)
        instance.end_date = datetime.datetime.utcnow()
        self.store_service.set_completing(_id)
        self._save_statistics(instance, session, post_data)
        self._save_status_information(instance, session, post_data)
        self.store_service.clear_completing(_id)
        session.commit()
        return True
def reset_pipeline_instance(self, pipeline_instance_id):
    """Reset a whole pipeline instance back to its initial workflow state."""
    for session in get_db_session():
        # Eager-load the full instance tree so the workflow engine can walk
        # stages -> workflows -> actions without extra queries.
        loaded_instance = session.query(PipelineInstance) \
            .options(
                joinedload(PipelineInstance.stage_instances)
                .joinedload(StageInstance.workflow_instances)
                .joinedload(WorkflowInstance.action_instances)) \
            .filter(PipelineInstance.id == pipeline_instance_id).first()
        engine = InstanceWorkflowEngine(self.status_dal, loaded_instance)
        engine.reset_pipeline()
        session.commit()
        return True
def get_work_request_by_action_instance_id(self, action_instance_id):
    """Build the work request dict for a single action instance.

    :param action_instance_id: id of the action instance to look up
    :return: the work request's ``__dict__``, or None when the id matches
        nothing
    """
    for session in get_db_session():
        work_requests = {}
        results = []
        rows = session.query(ActionInstance, PipelineParameters) \
            .outerjoin(PipelineParameters,
                       PipelineParameters.pipeline_instance_id ==
                       ActionInstance.pipeline_instance_id) \
            .filter(ActionInstance.id == action_instance_id).all()
        for action_instance, pipeline_parameters in rows:
            self.configure_work_request(action_instance, pipeline_parameters,
                                        work_requests, results,
                                        include_configuration=False)
        # Robustness fix: previously results[0] raised IndexError for an
        # unknown id, so the final `return None` was unreachable in that case.
        if results:
            return results[0].__dict__
    return None
def cancel_pipeline_instance(self, pipeline_instance_id):
    """Cancel a pipeline instance and tell its assigned workers to stop.

    :raises InvalidObjectException: when the instance id is unknown.
    """
    for session in get_db_session():
        pipeline_instance = self.get_pipeline_instance_by_id(
            pipeline_instance_id, session)
        if not pipeline_instance:
            raise InvalidObjectException("Pipeline Instance not found", 404)
        for action_instance in pipeline_instance.action_instances:
            for client in StoreService.get_clients(self.app).values():
                # Only notify the worker actually assigned to this action.
                cancellable = \
                    action_instance.status_id <= StatusConstants.SUCCESS
                if cancellable and \
                        client.get_uri() == action_instance.assigned_to:
                    self.queue_constants.cancel_worker(
                        action_instance.serialize())
        pipeline_instance.status_id = StatusConstants.CANCELED
        pipeline_instance.end_date = datetime.datetime.utcnow()
        session.commit()
        return {"message": "Running clients have been canceled and pipeline canceled."}
def create_version_for_commit(self, commit_identifier, data_version):
    """Create a Version attached to an existing commit.

    :raises DatabaseException: code 400 when the version name already
        exists, code 404 when the commit is unknown.
    """
    for session in get_db_session():
        commit = session.query(Commit).filter(
            Commit.commit_identifier == commit_identifier).first()
        if commit:
            duplicate = session.query(Version).filter(
                Version.name == data_version).first()
            if duplicate:
                raise DatabaseException("Version already exists", code=400)
            created = Version(commit_id=commit.id, name=data_version)
            session.add(created)
            session.commit()
            return created.serialize()
    raise DatabaseException("Invalid commit", code=404)
def mark_step_for_commit(self, commit_identifier, step_custom_id, status_name):
    """Set the status of a release step identified via a commit.

    Returns the updated step, or None when the commit or release cannot be
    resolved.
    """
    commit = self.ci_module.get_by_identifier(commit_identifier)
    if commit is not None:
        for session in get_db_session():
            # NOTE(review): called without a session, this returns the
            # *serialized* release; the `.id` access below assumes attribute
            # access works on that form — confirm against the serializer.
            release = self.get_release_by_commit_identifier(commit_identifier)
            if release is not None:
                updated_step = self.set_release_step(
                    release.id, step_custom_id, status_name, session)
                session.commit()
                return updated_step
    logger.debug("Step is not the head of the release.")
    return None
def create_git_commit(self, commit_identifier, vcs_id, additional_info=None,
                      pipeline_instance_id=None, in_session=None):
    """Record a git commit, reusing ``in_session`` when one is supplied.

    The boolean passed to ``_create_git_commit`` differs per path (True for
    a self-opened session, False for a caller-owned one) — presumably it
    controls whether the helper commits; confirm against the helper.
    """
    if in_session is not None:
        self._create_git_commit(commit_identifier, additional_info,
                                pipeline_instance_id, in_session, False,
                                vcs_id)
        return
    for session in get_db_session():
        self._create_git_commit(commit_identifier, additional_info,
                                pipeline_instance_id, session, True,
                                vcs_id)
def cancel_action_instance(self, action_instance_id):
    """Cancel a single action instance and notify its worker.

    :raises InvalidObjectException: when the id is unknown.
    """
    for session in get_db_session():
        instance = self.get_action_instance_by_id(action_instance_id, session)
        if not instance:
            raise InvalidObjectException("Action Instance not found", 404)
        # Serialize before the workflow engine mutates the instance's state.
        serialized = instance.serialize()
        engine = InstanceWorkflowEngine(StatusDal(session),
                                        instance.pipeline_instance)
        engine.complete_an_action(action_instance_id,
                                  StatusConstants.CANCELED)
        self.queue_constants.cancel_worker(serialized)
        session.commit()
        return {"message": "Action Instance has been canceled."}
def get_test_results(self, action_instance_id, filters=None, in_session=None):
    """Group QA test history rows for an action instance by status name.

    :param action_instance_id: id of the action instance whose tests to load
    :param filters: optional filter spec applied via ``ORMUtil``
    :param in_session: optional session; when omitted, one is opened here and
        serialization additionally includes the ``stacktrace`` child (as in
        the original branches)
    :return: dict mapping status name -> list of serialized test histories
    :rtype: dict
    """
    # Decomposition: the two branches previously duplicated ~20 lines of
    # query building and grouping, differing only in the serialize spec.
    if in_session is None:
        for session in get_db_session():
            return self._collect_test_results(
                session, action_instance_id, filters,
                {QaTestHistory.__tablename__:
                 ['test', 'status', 'stacktrace']})
        return {}
    return self._collect_test_results(in_session, action_instance_id, filters,
                                      ['test', 'status'])

def _collect_test_results(self, session, action_instance_id, filters,
                          serialize_spec):
    """Run the test-history query and bucket serialized rows by status name."""
    query = session.query(QaTestHistory).options(
        joinedload(QaTestHistory.test)).options(
            joinedload(QaTestHistory.status)).options(
                joinedload(QaTestHistory.stacktrace)) \
        .filter(QaTestHistory.action_instance_id == action_instance_id)
    query = ORMUtil.get_filtered_query(query, filters, QaTestHistory)
    results = {}
    for test_history in query.all():
        results.setdefault(test_history.status.name, []).append(
            test_history.serialize(serialize_spec))
    return results
def list(self, endpoint):
    """Serve a filtered, ordered and limited listing for an endpoint as JSON.

    Returns 404 when the endpoint is unknown.
    """
    if self._is_valid(endpoint):
        for session in get_db_session():
            clazz = self._get_clazz(endpoint)
            # Build the query step by step: filter, ordering, joins, limit.
            query = self._get_query(session, clazz)
            query = self._set_filter(clazz, query)
            query = self._set_orderby_direction(query, clazz)
            query = self._set_joins(query)
            query = self._set_limit(query)
            fields = self._get_additional_fields(clazz)
            serialized = [row.serialize(fields) for row in query.all()]
            return Response(json.dumps(serialized),
                            content_type='application/json')
    else:
        return Response(status=404)
def reset_action_instance(self, _id, complete_reset=False, check_status=False):  # pylint: disable=unused-argument
    """Reset an action instance via the workflow engine.

    When ``check_status`` is set, only INPROGRESS instances are reset (False
    is returned otherwise). ``complete_reset`` is currently unused; it is
    kept for interface compatibility.
    """
    for session in get_db_session():
        instance = session.query(ActionInstance).get(_id)
        if check_status and \
                instance.status_id != StatusConstants.INPROGRESS:
            return False
        engine = InstanceWorkflowEngine(self.status_dal,
                                        instance.pipeline_instance)
        engine.reset_action(instance)
        # QA results tied to this action are wiped too, when a QA module
        # is configured.
        if self.qa_module is not None:
            self.qa_module.reset_results(instance.id, session)
        session.commit()
        return True
def get_qa_testmap_coverage(self, pipeline_instance_id):
    """Collect serialized QA test-map coverage for a pipeline instance.

    :param pipeline_instance_id: id of the pipeline instance
    :return: list of serialized QaTestMapping dicts (with their area,
        feature, behavior_point and test children), or [] when no session /
        no rows
    :rtype: list
    """
    objects = []
    for session in get_db_session():
        # Eager-load every mapping child so serialization needs no lazy loads.
        # NOTE(review): the first join's ON clause compares PipelineInstance.id
        # to the *parameter* (a literal), anchoring the query to one instance
        # rather than joining on a column pair — looks intentional, confirm.
        query = session.query(QaTestMapping) \
            .options(joinedload(QaTestMapping.feature, innerjoin=True)) \
            .options(joinedload(QaTestMapping.behavior_point, innerjoin=True)) \
            .options(joinedload(QaTestMapping.test, innerjoin=True)) \
            .options(joinedload(QaTestMapping.area, innerjoin=True)) \
            .join(PipelineInstance, PipelineInstance.id == pipeline_instance_id) \
            .join(Vcs, Vcs.pipeline_id == PipelineInstance.pipeline_id) \
            .join(QaProduct, QaProduct.vcs_id == Vcs.id) \
            .join(QaArea, and_(QaArea.product_id == QaProduct.id,
                               QaArea.id == QaTestMapping.area_id))
        objects = ResultsSerializer.serialize_results(
            query.all(), {
                QaTestMapping.__tablename__:
                ['area', 'feature', 'behavior_point', 'test']
            })
    return objects
def get_release_by_id(self, release_id, in_session=None, with_for_update=False):
    """Fetch a Release by id, optionally taking a row lock.

    :param release_id: primary key of the release
    :param in_session: optional caller-owned session; when omitted a session
        is opened here and the serialized release is returned
    :param with_for_update: when True, acquire SELECT ... FOR UPDATE on the
        fetched row
    :return: ORM Release (with a session) or serialized dict (without); the
        serialized path raises AttributeError for an unknown id, matching
        the previous behavior
    """
    # Bug fix: previously both arms of every `if with_for_update` branch
    # called .with_for_update(), so the flag was dead and the lock was
    # always taken. Lock only when requested.
    if in_session is None:
        for session in get_db_session():
            query = session.query(Release).filter(Release.id == release_id)
            if with_for_update:
                query = query.with_for_update()
            return query.first().serialize()
    query = in_session.query(Release).filter(Release.id == release_id)
    if with_for_update:
        query = query.with_for_update()
    return query.first()