from joblib import Parallel, delayed from uuid import uuid4 import config.ckg_config as ckg_config from graphdb_builder.ontologies import ontologies_controller as oh from graphdb_builder.databases import databases_controller as dh from graphdb_builder.experiments import experiments_controller as eh from graphdb_builder.users import users_controller as uh from graphdb_builder import builder_utils log_config = ckg_config.graphdb_builder_log logger = builder_utils.setup_logging(log_config, key="importer") import_id = uuid4() try: cwd = os.path.abspath(os.path.dirname(__file__)) config = builder_utils.setup_config('builder') directories = builder_utils.get_full_path_directories() oconfig = builder_utils.setup_config('ontologies') dbconfig = builder_utils.setup_config('databases') econfig = builder_utils.setup_config('experiments') uconfig = builder_utils.setup_config('users') except Exception as err: logger.error("importer - Reading configuration > {}.".format(err)) START_TIME = datetime.now() def ontologiesImport(importDirectory, ontologies=None, download=True, import_type="partial"):
from dash.exceptions import PreventUpdate from app import app, server as application from apps import initialApp, projectCreationApp, dataUploadApp, dataUpload, projectApp, importsApp, homepageApp, loginApp, projectCreation from graphdb_builder import builder_utils from graphdb_builder.builder import loader from graphdb_builder.experiments import experiments_controller as eh from report_manager import utils import config.ckg_config as ckg_config from worker import create_new_project, create_new_identifiers from graphdb_connector import connector log_config = ckg_config.report_manager_log logger = builder_utils.setup_logging(log_config, key="index page") try: config = builder_utils.setup_config('builder') directories = builder_utils.get_full_path_directories() except Exception as err: logger.error("Reading configuration > {}.".format(err)) cwd = os.path.abspath(os.path.dirname(__file__)) experimentDir = os.path.join(directories['dataDirectory'], 'experiments') experimentsImportDir = directories['experimentsDirectory'] tmpDirectory = directories['tmpDirectory'] driver = connector.getGraphDatabaseConnectionConfiguration() separator = config["separator"] app.layout = dcc.Loading(children=[ html.Div([ dcc.Location(id='url', refresh=False), html.Div(id='page-content',
The module can perform full updates, executing both steps for all the ontologies, databases and experiments or a partial update. Partial updates can execute step 1 or step 2 for specific data. """ import argparse from graphdb_builder.builder import importer, loader import config.ckg_config as ckg_config from graphdb_builder import builder_utils log_config = ckg_config.graphdb_builder_log logger = builder_utils.setup_logging(log_config, key="builder") try: config = builder_utils.setup_config('builder') directories = builder_utils.get_full_path_directories() dbconfig = builder_utils.setup_config('databases') oconfig = builder_utils.setup_config('ontologies') except Exception as err: logger.error("builder - Reading configuration > {}.".format(err)) def run_minimal_update(user, n_jobs=3): licensed_dbs = ['phosphositeplus', 'drugbank'] licensed_ont = ['Clinical_variable'] mapping_ont = ['Disease', 'Gene_ontology', 'Experimental_factor'] minimal_load = ['ontologies', 'modified_proteins', 'drugs', 'mentions', 'side effects', 'clinical_variants', 'project', 'experiment'] logger.info("The user {} chose to perform a minimal build, after creating the database from a dump".format(user)) logger.info("Building database > step 1: Importing licensed ontologies and databases") importer.ontologiesImport(importDirectory=directories['importDirectory'], ontologies=licensed_ont, download=False)
import os.path import sys import config.ckg_config as ckg_config from graphdb_builder import builder_utils from graphdb_builder.databases.parsers import * from joblib import Parallel, delayed from datetime import date log_config = ckg_config.graphdb_builder_log logger = builder_utils.setup_logging(log_config, key="database_controller") try: dbconfig = builder_utils.setup_config('databases') except Exception as err: logger.error("Reading configuration > {}.".format(err)) def parseDatabase(importDirectory, database, download=True): stats = set() updated_on = None if download: updated_on = str(date.today()) try: logger.info("Parsing database {}".format(database)) if database.lower() == "jensenlab": result = jensenlabParser.parser(dbconfig["databasesDir"], download) for qtype in result: relationships, header, outputfileName = result[qtype] outputfile = os.path.join(importDirectory, outputfileName) builder_utils.write_relationships(relationships, header, outputfile) logger.info("Database {} - Number of {} relationships: {}".format(database, qtype, len(relationships)))
import os import sys import pandas as pd import numpy as np import plotly.graph_objs as go import dash_core_components as dcc import dash_html_components as html import ckg_utils from graphdb_connector import connector from graphdb_builder import builder_utils from analytics_core.viz import viz from analytics_core import utils try: cwd = os.path.abspath(os.path.dirname(__file__)) config = builder_utils.setup_config('experiments') driver = connector.getGraphDatabaseConnectionConfiguration() except Exception as err: raise Exception("Reading configuration > {}.".format(err)) def size_converter(value): """ Converts a given value to the highest possible unit, maintaining two decimals. :param int or float value: :return: String with converted value and units. """ unit = 'KB' val = np.round(value * 0.001, 2) if len(str(val).split('.')[0]) > 3:
import os import sys import argparse import pandas as pd from datetime import datetime, timedelta from passlib.hash import bcrypt import config.ckg_config as ckg_config from graphdb_connector import connector from graphdb_builder import builder_utils from graphdb_builder.users import users_controller as uh log_config = ckg_config.graphdb_builder_log logger = builder_utils.setup_logging(log_config, key='user_creation') try: config = builder_utils.setup_config('builder') directories = builder_utils.get_full_path_directories() uconfig = builder_utils.setup_config('users') except Exception as err: logger.error("Reading configuration > {}.".format(err)) cwd = os.path.abspath(os.path.dirname(__file__)) def create_user_from_dict(driver, data): """ Creates graph database node for new user and adds properties to the node. :param driver: neo4j driver, which provides the connection to the neo4j graph database. :param dict data: dictionary with the user information). """
import csv
import os.path
import sys
from datetime import date

import pandas as pd

import config.ckg_config as ckg_config
from graphdb_builder import mapping as mp, builder_utils
from graphdb_builder.ontologies.parsers import *

log_config = ckg_config.graphdb_builder_log
logger = builder_utils.setup_logging(log_config, key="ontologies_controller")

try:
    config = builder_utils.setup_config('ontologies')
except Exception as err:
    logger.error("Reading configuration > {}.".format(err))


def entries_to_remove(entries, the_dict):
    """
    Remove the given keys from a dictionary, in place.

    Keys listed in *entries* that are not present in *the_dict* are
    silently ignored.

    :param list entries: list of keys to be deleted from the dictionary.
    :param dict the_dict: dictionary to be modified in place.
    :return: None. The dictionary is mutated directly; no copy is made.
    """
    for key in entries:
        # pop with a default does the membership test and deletion in a
        # single lookup and never raises KeyError for absent keys.
        the_dict.pop(key, None)