Example #1
import os

from ckg import ckg_utils
from ckg.graphdb_builder import builder_utils


def read_config():
    try:
        ckg_config = ckg_utils.read_ckg_config()
        cwd = os.path.dirname(os.path.abspath(__file__))
        path = os.path.join(cwd, 'connector_config.yml')
        config = ckg_utils.get_configuration(path)
        log_config = ckg_config['graphdb_connector_log']
        logger = builder_utils.setup_logging(log_config, key="connector")
        return config
    except Exception as err:
        logger.error("Reading configuration > {}.".format(err))
Example #2
import os.path
import sys
from ckg import ckg_utils
from ckg.graphdb_builder import builder_utils
from ckg.graphdb_builder.databases.parsers import *
from joblib import Parallel, delayed
from datetime import date

try:
    ckg_config = ckg_utils.read_ckg_config()
    log_config = ckg_config['graphdb_builder_log']
    logger = builder_utils.setup_logging(log_config, key="database_controller")
    dbconfig = builder_utils.setup_config('databases')
except Exception as err:
    logger.error("Reading configuration > {}.".format(err))


def parseDatabase(importDirectory, database, download=True):
    stats = set()
    updated_on = None
    if download:
        updated_on = str(date.today())
    try:
        logger.info("Parsing database {}".format(database))
        database_directory = ckg_config['databases_directory']
        if database.lower() == "jensenlab":
            result = jensenlabParser.parser(database_directory, download)
            for qtype in result:
                relationships, header, outputfileName = result[qtype]
                outputfile = os.path.join(importDirectory, outputfileName)
                builder_utils.write_relationships(relationships, header,
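The snippet above is truncated mid-call; for orientation, a hypothetical invocation of parseDatabase, where the import directory is a placeholder path:

# Hypothetical call: parse the JensenLab resources into an existing import directory.
# '/path/to/imports' is a placeholder; download=False assumes the files were fetched earlier.
parseDatabase('/path/to/imports', 'jensenlab', download=False)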
Example #3
from ckg import ckg_utils
from ckg.graphdb_builder import mapping as mp, builder_utils
from ckg.graphdb_builder.ontologies.parsers import *  # TODO: remove star import
import os.path
import pandas as pd
import csv
from datetime import date
import sys

try:
    ckg_config = ckg_utils.read_ckg_config()
    log_config = ckg_config['graphdb_builder_log']
    logger = builder_utils.setup_logging(log_config,
                                         key="ontologies_controller")
    config = builder_utils.setup_config('ontologies')
except Exception as err:
    logger.error("Reading configuration > {}.".format(err))


def entries_to_remove(entries, the_dict):
    """
    Removes key/value pairs from a given dictionary, based on a list of provided keys.

    :param list entries: list of keys to be deleted from the dictionary.
    :param dict the_dict: dictionary to prune.
    :return: None. The dictionary is modified in place, with the listed keys removed.
    """
    for key in entries:
        if key in the_dict:
            del the_dict[key]
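A short usage sketch for entries_to_remove; the dictionary below is made up purely for illustration:

settings = {'host': 'localhost', 'port': 7687, 'debug': True}
entries_to_remove(['debug', 'missing_key'], settings)
print(settings)  # {'host': 'localhost', 'port': 7687} -- keys that are absent are simply skipped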
Example #4
import os
import re
import argparse
import pandas as pd
import numpy as np
from datetime import datetime, timedelta
from passlib.hash import bcrypt
from ckg import ckg_utils
from ckg.graphdb_connector import connector
from ckg.graphdb_builder import builder_utils

try:
    ckg_config = ckg_utils.read_ckg_config()
    cwd = os.path.dirname(os.path.abspath(__file__))
    config = builder_utils.setup_config('users')
    log_config = ckg_config['graphdb_builder_log']
    logger = builder_utils.setup_logging(log_config, key='users_controller')
except Exception as err:
    logger.error("Reading configuration > {}.".format(err))


def parseUsersFile(expiration=365):
    """
    Creates a new user in the graph database and the corresponding node, through the following steps:

        1. Generates a new user identifier.
        2. Checks whether a user with the given properties already exists in the database. If not:
        3. Creates a new local user (with access to the graph database).
        4. Saves the data to a tab-delimited file.

    :param int expiration: number of days the user is given access.
    :return: Writes the relevant .tsv file for the users in the provided file.
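Given the signature above, a call only needs to adjust the access window; the 180 days used here is an arbitrary placeholder:

# Hypothetical call: register the users listed in the configured users file,
# granting 180 days of access instead of the default 365.
parseUsersFile(expiration=180)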
Example #5
import sys
import os.path
from ckg.graphdb_builder import builder_utils
from ckg.graphdb_builder.experiments.parsers import clinicalParser, proteomicsParser, wesParser
from ckg import ckg_utils

ckg_config = ckg_utils.read_ckg_config()
log_config = ckg_config['graphdb_builder_log']
logger = builder_utils.setup_logging(log_config, key="experiments_controller")


def generate_dataset_imports(projectId, dataType, dataset_import_dir):
    stats = set()
    builder_utils.checkDirectory(dataset_import_dir)
    try:
        if dataType in ['project', 'experimental_design', 'clinical']:
            data = clinicalParser.parser(projectId, dataType)
            for dtype, ot in data:
                generate_graph_files(data[(dtype, ot)], dtype, projectId, stats, ot, dataset_import_dir)
        elif dataType in ["proteomics", "interactomics", "phosphoproteomics"]:
            data = proteomicsParser.parser(projectId, dataType)
            for dtype, ot in data:
                generate_graph_files(data[(dtype, ot)], dtype, projectId, stats, ot, dataset_import_dir)
        elif dataType == "wes":
            data = wesParser.parser(projectId)
            for dtype, ot in data:
                generate_graph_files(data[(dtype, ot)], dtype, projectId, stats, ot, dataset_import_dir)
        else:
            raise Exception("Error when importing experiment for project {}. Non-existing parser for data type {}".format(projectId, dataType))
    except Exception as err:
        exc_type, exc_obj, exc_tb = sys.exc_info()
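A hypothetical call to generate_dataset_imports; the project identifier and import directory below are placeholders, not values taken from CKG:

# Placeholder project id and output directory, purely for illustration.
generate_dataset_imports('P0000001', 'proteomics', '/path/to/imports/P0000001')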
Example #6
import os
import sys
import re
import pandas as pd
import numpy as np
from ckg import ckg_utils
from ckg.graphdb_connector import connector
from ckg.graphdb_builder import builder_utils
from ckg.graphdb_connector import query_utils
from ckg.analytics_core.viz import viz

ckg_config = ckg_utils.read_ckg_config()
log_config = ckg_config['graphdb_builder_log']
logger = builder_utils.setup_logging(log_config, key="data_upload")


def get_data_upload_queries():
    """
    Reads the YAML file containing the queries relevant to parsing of clinical data and \
    returns a Python object (dict[dict]).

    :return: Nested dictionary.
    """
    try:
        queries_path = "../queries/data_upload_cypher.yml"
        directory = os.path.dirname(os.path.abspath(__file__))
        data_upload_cypher = ckg_utils.get_queries(os.path.join(directory, queries_path))
    except Exception as err:
        exc_type, exc_obj, exc_tb = sys.exc_info()
        fname = os.path.split(exc_tb.tb_frame.f_code.co_filename)[1]
        logger.error("Error: {}. Reading queries from file {}: {}, file: {},line: {}".format(err, queries_path, sys.exc_info(), fname, exc_tb.tb_lineno))
Example #7
import warnings

from dash import dcc, html  # dcc/html are used below; in older Dash versions these come from dash_core_components / dash_html_components

from ckg import ckg_utils
from ckg.report_manager.app import app, server as application
from ckg.report_manager.apps import initialApp, adminApp, projectCreationApp, dataUploadApp, dataUpload, projectApp, importsApp, homepageApp, loginApp, projectCreation
from ckg.graphdb_builder import builder_utils
from ckg.graphdb_builder.builder import loader, builder
from ckg.graphdb_builder.experiments import experiments_controller as eh
from ckg.report_manager import utils
from ckg.report_manager.worker import create_new_project, create_new_identifiers, run_minimal_update_task, run_full_update_task
from ckg.graphdb_connector import connector

warnings.filterwarnings("ignore", category=DeprecationWarning)
warnings.filterwarnings("ignore", category=RuntimeWarning)

try:
    ckg_config = ckg_utils.read_ckg_config()
    log_config = ckg_config['report_manager_log']
    logger = builder_utils.setup_logging(log_config, key="index page")
    config = builder_utils.setup_config('builder')
    separator = config["separator"]
except Exception as err:
    logger.error("Reading configuration > {}.".format(err))

app.layout = dcc.Loading(children=[
    html.Div([
        dcc.Location(id='url', refresh=False),
        html.Div(id='page-content',
                 style={'padding-top': 10},
                 className='container-fluid')
    ])
],
                         style={
                             'text-align': 'center',
Example #8
    The module can perform full updates, executing both steps for all the ontologies,
    databases and experiments, or partial updates, which run step 1 or step 2 for
    specific data only.

"""

import argparse
from ckg.graphdb_builder.builder import importer, loader
from ckg import ckg_utils
from ckg.graphdb_builder import builder_utils

try:
    ckg_config = ckg_utils.read_ckg_config()
    log_config = ckg_config['graphdb_builder_log']
    logger = builder_utils.setup_logging(log_config, key="builder")
    config = builder_utils.setup_config('builder')
    directories = builder_utils.get_full_path_directories()
    dbconfig = builder_utils.setup_config('databases')
    oconfig = builder_utils.setup_config('ontologies')
except Exception as err:
    logger.error("builder - Reading configuration > {}.".format(err))


def run_minimal_update(user, n_jobs=3):
    licensed_dbs = ['phosphositeplus', 'drugbank']
    licensed_ont = ['Clinical_variable']
    mapping_ont = ['Disease', 'Gene_ontology', 'Experimental_factor']
    minimal_load = ['ontologies', 'modified_proteins', 'drugs', 'mentions', 'side effects', 'clinical_variants', 'project', 'experiment']
    logger.info("The user {} chose to perform a minimal build, after creating the database from a dump".format(user))
    logger.info("Building database > step 1: Importing licensed ontologies and databases")
Example #9
import os
import sys
import argparse
import pandas as pd
from datetime import datetime, timedelta
from passlib.hash import bcrypt
from ckg import ckg_utils
from ckg.graphdb_connector import connector
from ckg.graphdb_builder import builder_utils
from ckg.graphdb_builder.users import users_controller as uh

try:
    ckg_config = ckg_utils.read_ckg_config()
    log_config = ckg_config['graphdb_builder_log']
    logger = builder_utils.setup_logging(log_config, key='user_creation')
    uconfig = builder_utils.setup_config('users')
except Exception as err:
    logger.error("Reading configuration > {}.".format(err))


def create_user_from_dict(driver, data):
    """
    Creates a graph database node for the new user and adds properties to the node.

    :param driver: neo4j driver, which provides the connection to the neo4j graph database.
    :param dict data: dictionary with the user information.
    """
    query_name_node = 'create_user_node'
    result = None

    user_id = get_new_user_id(driver)
Example #10
import os
import sys
from ckg import ckg_utils
from ckg.graphdb_connector import connector
from ckg.graphdb_builder import builder_utils
from ckg.graphdb_builder.builder import loader
from ckg.graphdb_builder.experiments import experiments_controller as eh

try:
    ckg_config = ckg_utils.read_ckg_config()
    log_config = ckg_config['report_manager_log']
    logger = builder_utils.setup_logging(log_config, key="project_creation")
except Exception as err:
    logger.error("Reading configuration > {}.".format(err))


def get_project_creation_queries():
    """
    Reads the YAML file containing the queries relevant to project creation, parses the given stream and \
    returns a Python object (dict[dict]).

    :return: Nested dictionary.
    """
    try:
        directory = os.path.dirname(os.path.abspath(__file__))
        queries_path = "../queries/project_creation_cypher.yml"
        project_creation_cypher = ckg_utils.get_queries(
            os.path.join(directory, queries_path))
    except Exception as err:
        exc_type, exc_obj, exc_tb = sys.exc_info()
        fname = os.path.split(exc_tb.tb_frame.f_code.co_filename)[1]