def get_pipelines_for_dashboard():
    """Get all pipelines available for displaying in dashboard.

    NOTE(review): a later definition of `get_pipelines_for_dashboard` in this
    module shadows this one at import time — confirm which is intended.

    Returns:
        list[dict]: one entry per 'project' document with keys
        'pipeline_name', 'scm_url', 'jenkins_url' and 'last_build'.
        Missing SCM/Jenkins URLs fall back to the placeholder "dummy-url".
    """
    database = connect_db()
    pipeline_documents = Database.get_documents_by_type(database, doc_type='project')
    jenkins_obj = connect_jenkins()
    # The external Jenkins service URL is the same for every pipeline, so the
    # Kubernetes lookup is hoisted out of the loop and done at most once
    # (lazily, only when some pipeline actually has a 'jenkins_url').
    external_url = None
    pipelines_data = []
    for pipeline in pipeline_documents:
        last_build_status = jenkins_obj.get_last_build_status(
            job_name=pipeline['name'])
        scm = pipeline.get('scm_url', "dummy-url")
        jenkins = "dummy-url"
        if 'jenkins_url' in pipeline:
            if external_url is None:
                kube = KubernetesAPI()
                external_url = kube.get_service_url('build-at-scale-jenkins')
            jenkins = "http://%s/job/%s" % (external_url, pipeline['name'])
        pipelines_data.append({
            'pipeline_name': pipeline['name'],
            'scm_url': scm,
            'jenkins_url': jenkins,
            'last_build': last_build_status
        })
    return pipelines_data
def purge_old_workspaces():
    """Purge workspaces older than X days.

    @return: tuple of (count of workspaces deleted,
             list of deleted workspace names)
    """
    database = helpers.connect_db()
    config = helpers.get_db_config()
    projects_in_db = Database.get_documents_by_type(database, doc_type='project')
    if not projects_in_db:
        # Keep the return shape consistent with the non-empty path
        # (the original returned a bare 0 here but a tuple below).
        return 0, []
    # OntapService depends only on config values, so build it once rather
    # than once per workspace.
    ontap = OntapService(config['ontap_api'], config['ontap_apiuser'],
                         config['ontap_apipass'], config['ontap_svm_name'],
                         config['ontap_aggr_name'], config['ontap_data_ip'])
    count = 0
    deleted_workspaces = []
    for project in projects_in_db:
        workspaces_in_project = Database.get_workspaces_by_project(
            database, project=project['name'])
        for workspace in workspaces_in_project:
            # ontap doesn't provide last_access_timestamp for volumes;
            # hence, snapdiff latest snapshot with snapshot X days older
            # to find if workspace is active
            deleted, error = ontap.get_snapdiff_and_delete(
                volume_name=workspace.value,
                count=project['workspace_purge_limit'])
            # delete inconsistent or old workspace that exceeded purge limit
            if error is not None or deleted is True:
                workspace_doc = Database.get_document_by_name(database, workspace.value)
                database.delete(workspace_doc)
                deleted_workspaces.append(workspace.value)
                logging.info("Purge: deleted workspace %s from DB", workspace.value)
                count += 1
    return count, deleted_workspaces
def get_db_user_document(username):
    """Connect to database and retrieve the user document for *username*.

    Scans every 'user' document; returns the last one whose 'name' matches,
    or None when no user matches.
    """
    db = connect_db()
    matched = None
    for doc in Database.get_documents_by_type(db, 'user'):
        if doc['name'] == username:
            matched = doc
    return matched
def get_pipelines():
    """Get the names of all pipelines available.

    Returns:
        list[str]: the 'name' field of every 'project' document.
    """
    database = connect_db()
    pipeline_documents = Database.get_documents_by_type(
        database, doc_type='project')
    # Comprehension instead of loop-and-append: this is pure list construction.
    return [pipeline['name'] for pipeline in pipeline_documents]
def get_workspaces():
    """Get information about all workspaces associated with Build@Scale.

    Returns:
        list: 'workspace' documents from the database, or an empty list
        when retrieval fails (best-effort: the error is logged, not raised).
    """
    db = connect_db()
    try:
        workspaces = Database.get_documents_by_type(db, 'workspace')
    except Exception:
        # Broad catch is deliberate (best-effort boundary). Use lazy %-args
        # instead of eager string interpolation; the bound exception variable
        # in the original was unused since format_exc() captures the details.
        logging.error("Unable to retrieve workspace documents from database: %s",
                      traceback.format_exc())
        workspaces = []
    return workspaces
def purge_ci_snapshots():
    """Purge CI snapshots.

    @return: count of CI snapshots purged
    """
    database = helpers.connect_db()
    # Every active project contributes its own snapshot purge.
    projects = Database.get_documents_by_type(database, doc_type="project")
    if not projects:
        return 0
    purged = 0
    for project in projects:
        volume = project['volume']
        # Clean up inconsistent snapshots first, then purge by retention limit.
        purge_inconsistent_snapshots(volume=volume)
        purged += purge_snapshots_by_volume(volume, project['ci_purge_limit'])
    return purged
def get_pipelines_for_dashboard():
    """Get all pipelines available for displaying in dashboard.

    NOTE(review): this module defines `get_pipelines_for_dashboard` twice;
    this later definition is the one in effect at import time.

    Returns:
        list[dict]: one entry per 'project' document with keys
        'pipeline_name', 'scm_url', 'jenkins_url' and 'last_build'.
    """
    database = connect_db()
    project_docs = Database.get_documents_by_type(
        database, doc_type='project')
    jenkins_obj = connect_jenkins()
    dashboard_rows = []
    for doc in project_docs:
        status = jenkins_obj.get_last_build_status(job_name=doc['name'])
        # both scm and jenkins URLs are set as part of pipeline_create
        dashboard_rows.append({
            'pipeline_name': doc['name'],
            'scm_url': doc['scm_url'],
            'jenkins_url': doc['jenkins_url'],
            'last_build': status,
        })
    return dashboard_rows