def parser(projectId, type='clinical'):
    """
    Parse the experiment files of a project and return the extracted dataframes.

    :param str projectId: internal project identifier.
    :param str type: which set of files to parse: 'project', \
        'experimental_design' or 'clinical' (default).
    :return: Dictionary of parsed dataframes (empty if *type* is unrecognized).
    """
    experiments_directory = ckg_utils.read_ckg_config(key='experiments_directory')
    config = builder_utils.get_config(config_name="clinical.yml", data_type='experiments')

    def _project_subdir(subfolder):
        # Build '<experiments>/PROJECTID/<subfolder>/' and substitute the real id,
        # mirroring the directory-template convention used elsewhere in the builder.
        template = os.path.join(experiments_directory, 'PROJECTID/{}/'.format(subfolder))
        return template.replace('PROJECTID', projectId)

    data = {}
    if type == 'project':
        data.update(project_parser(projectId, config, _project_subdir('project')))
    elif type == 'experimental_design':
        data.update(experimental_design_parser(projectId, config, _project_subdir('experimental_design')))
    elif type == 'clinical':
        data.update(clinical_parser(projectId, config, _project_subdir('clinical')))
    return data
def read_config():
    """
    Read the graphdb connector configuration (connector_config.yml next to this
    module) and set up connector logging.

    :return: Dictionary with the connector configuration, or None if reading fails.
    """
    logger = None
    try:
        ckg_config = ckg_utils.read_ckg_config()
        cwd = os.path.dirname(os.path.abspath(__file__))
        path = os.path.join(cwd, 'connector_config.yml')
        config = ckg_utils.get_configuration(path)
        log_config = ckg_config['graphdb_connector_log']
        logger = builder_utils.setup_logging(log_config, key="connector")
        return config
    except Exception as err:
        # Bug fix: the original referenced `logger` unconditionally here, but
        # `logger` is only bound on the last line of the try body — any earlier
        # failure raised NameError and masked the real error. Fall back to a
        # module logger when setup_logging was never reached.
        message = "Reading configuration > {}.".format(err)
        if logger is not None:
            logger.error(message)
        else:
            import logging
            logging.getLogger(__name__).error(message)
def parser(projectId, type='proteomics', directory=None):
    """
    Parse the proteomics-type experiment files of a project.

    :param str projectId: internal project identifier.
    :param str type: experiment subfolder name (default 'proteomics').
    :param str directory: optional directory to parse; may contain the
        'PROJECTID' placeholder. Defaults to '<experiments>/PROJECTID/<type>'.
    :return: Dictionary of parsed dataframes.
    """
    exp_dir = ckg_utils.read_ckg_config(key='experiments_directory')
    cfg = builder_utils.get_config(config_name="proteomics.yml", data_type='experiments')
    target = directory if directory is not None else os.path.join(exp_dir, 'PROJECTID/' + type)
    # Substitute the real project id into the directory template.
    target = target.replace('PROJECTID', projectId)
    return parse_from_directory(projectId, target, cfg)
def build_page(self):
    """
    Builds project and generates the report.
    For each data type in the report (e.g. 'proteomics', 'clinical'), \
    creates a designated tab.
    A button to download the entire project and report is added.
    """
    # Collect any per-datatype configuration files previously uploaded to
    # the project's tmp directory, keyed by file stem.
    config_files = {}
    tmp_dir = ckg_utils.read_ckg_config(key='tmp_directory')
    if os.path.exists(tmp_dir):
        directory = os.path.join(tmp_dir, self.id)
        if os.path.exists(directory):
            config_files = {f.split('.')[0]: os.path.join(directory, f)
                            for f in os.listdir(directory)
                            if os.path.isfile(os.path.join(directory, f))}
    # Run report generation on the compute queue and block until it finishes.
    result = generate_project_report.apply_async(
        args=[self.project_id, config_files, self.force],
        task_id='generate_report' + self.session_id, queue='compute')
    result.get()  # fix: removed leftover debug print of the task result
    p = project.Project(self.project_id, datasets={}, knowledge=None,
                        report={}, configuration_files=config_files)
    p.build_project(False)
    self.title = "Project: {}".format(p.name) if p.name is not None else ''
    self.add_basic_layout()
    plots = p.show_report("app")
    p = None  # release the Project object before building the layout
    buttons = self.build_header()
    self.add_to_layout(buttons)
    tabs = []
    for data_type in plots:
        # Only data types that produced at least one plot get a tab.
        if len(plots[data_type]) >= 1:
            tab_content = [html.Div(plots[data_type])]
            tabs.append(dcc.Tab(tab_content, label=data_type))
    self.add_to_layout(dcc.Tabs(tabs))
def get_config(config_name, data_type='databases'):
    """
    Reads YAML configuration file and converts it into a Python dictionary.

    :param str config_name: name of the configuration YAML file.
    :param str data_type: configuration type ('databases' or 'ontologies').
    :return: Dictionary.

    .. note:: Use this function to obtain configuration for individual database/ontology parsers.
    """
    builder_dir = os.path.join(ckg_utils.read_ckg_config(key='ckg_directory'), 'graphdb_builder')
    # Config files live under graphdb_builder/<data_type>/config/<config_name>.
    config_path = os.path.join(builder_dir, '{}/config/{}'.format(data_type, config_name))
    return ckg_utils.get_configuration(config_path)
def setup_config(data_type="databases"):
    """
    Reads YAML configuration file and converts it into a Python dictionary.

    :param data_type: configuration type ('databases', 'ontologies', 'experiments' or 'builder').
    :return: Dictionary.

    .. note:: This function should be used to obtain the configuration for databases_controller.py, \
                ontologies_controller.py, experiments_controller.py and builder.py.
    """
    try:
        base = os.path.join(ckg_utils.read_ckg_config(key='ckg_directory'), 'graphdb_builder')
        # Controller configs follow the '<data_type>/<data_type>_config.yml' layout.
        rel_path = '{}/{}_config.yml'.format(data_type, data_type)
        return ckg_utils.get_configuration(os.path.join(base, rel_path))
    except Exception as err:
        raise Exception("builder_utils - Reading configuration > {}.".format(err))
def send_message_to_slack_webhook(message, message_to, username='******'):
    """
    Post *message* to the Slack webhook URL stored in config/wh.txt, mentioning
    *message_to*. Best-effort: failures are printed, never raised.

    :param str message: text to send.
    :param str message_to: Slack handle to mention.
    :param str username: display name for the webhook post.
    """
    webhook_file = os.path.join(ckg_utils.read_ckg_config(key='ckg_directory'), "config/wh.txt")
    if os.path.exists(webhook_file):
        with open(webhook_file, 'r') as hf:
            # strip(): editors typically leave a trailing newline, which would
            # corrupt the URL passed to urlopen.
            webhook_url = hf.read().strip()
        post = {
            "text": "@{} : {}".format(message_to, message),
            "username": username,
            "icon_url": "https://slack.com/img/icons/app-57.png"
        }
        try:
            json_data = json.dumps(post)
            req = request.Request(webhook_url,
                                  data=json_data.encode('ascii'),
                                  headers={'Content-Type': 'application/json'})
            # Fix: the original never closed the response (leaked connection);
            # use the context manager. The response body is not used.
            with request.urlopen(req):
                pass
        except Exception as em:
            print("EXCEPTION: " + str(em))
import os.path import sys from ckg import ckg_utils from ckg.graphdb_builder import builder_utils from ckg.graphdb_builder.databases.parsers import * from joblib import Parallel, delayed from datetime import date try: ckg_config = ckg_utils.read_ckg_config() log_config = ckg_config['graphdb_builder_log'] logger = builder_utils.setup_logging(log_config, key="database_controller") dbconfig = builder_utils.setup_config('databases') except Exception as err: logger.error("Reading configuration > {}.".format(err)) def parseDatabase(importDirectory, database, download=True): stats = set() updated_on = None if download: updated_on = str(date.today()) try: logger.info("Parsing database {}".format(database)) database_directory = ckg_config['databases_directory'] if database.lower() == "jensenlab": result = jensenlabParser.parser(database_directory, download) for qtype in result: relationships, header, outputfileName = result[qtype] outputfile = os.path.join(importDirectory, outputfileName) builder_utils.write_relationships(relationships, header,
def buildPage(self):
    """
    Builds page with the basic layout from *basicApp.py* and adds all the relevant plots
    from *imports.py*.
    """
    plots = []
    self.add_basic_layout()
    stats_file = os.path.join(ckg_utils.read_ckg_config(key='stats_directory'), "stats.hdf")
    if os.path.exists(stats_file):
        df = imports.get_stats_data(stats_file, n=3)
        # Both breakdown figures share the same 2x2 subplot layout.
        breakdown_titles = ('Entities imported', 'Relationships imported',
                            'File size', 'File size')
        plots.extend([
            imports.plot_total_number_imported(
                df, 'Number of imported entities and relationships'),
            imports.plot_total_numbers_per_date(
                df, 'Imported entities vs relationships'),
            imports.plot_databases_numbers_per_date(
                df, 'Full imports: entities/relationships per database',
                key='full', dropdown=True, dropdown_options='dates'),
            imports.plot_databases_numbers_per_date(
                df, 'Partial imports: entities/relationships per database',
                key='partial', dropdown=True, dropdown_options='dates'),
            imports.plot_import_numbers_per_database(
                df, 'Full imports: Breakdown entities/relationships',
                key='full', subplot_titles=breakdown_titles, colors=True,
                plots_1='entities', plots_2='relationships',
                dropdown=True, dropdown_options='databases'),
            imports.plot_import_numbers_per_database(
                df, 'Partial imports: Breakdown entities/relationships',
                key='partial', subplot_titles=breakdown_titles, colors=True,
                plots_1='entities', plots_2='relationships',
                dropdown=True, dropdown_options='databases'),
        ])
    else:
        plots.append(viz.get_markdown(
            text="# There are no statistics about recent imports."))
    self.extend_layout(plots)