def activate_upload_form(projectid, download_style):
    """Enable the data-upload form when the given project exists in the graph database.

    :param str projectid: external project identifier (e.g. 'P0000001X').
    :param dict download_style: style dict for the download link; its 'display' is set to 'none' in place.
    :return: tuple (message, upload-form style, report title, report href).
    """
    m = ''
    # Form is disabled by default; only enabled once the project is confirmed to exist.
    style = {'pointer-events': 'none', 'opacity': 0.5}
    download_style.update({'display': 'none'})
    report_title = ''
    report_href = ''
    driver = connector.getGraphDatabaseConnectionConfiguration()
    if driver is not None:
        if len(projectid) > 7:
            project = connector.find_node(driver, node_type='Project', parameters={'id': projectid})
            if len(project) == 0:
                m = 'ERROR: Project "{}" does not exist in the database.'.format(projectid)
            else:
                if 'name' in project:
                    report_title = 'Generate report: {}'.format(project['name'])
                    report_href = '/apps/project?project_id={}&force=0'.format(projectid)
                    m = 'Uploading data for Project: **{}**'.format(project['name'])
                    style = {}
    else:
        # Fixed typo in the user-facing message ('if' -> 'is').
        m = 'ERROR: Database is temporarily offline. Contact your administrator or start the database.'
    return m, style, report_title, report_href
def query_data(self):
    """Run every 'pre'-type query from this object's Cypher query file against the database.

    The PROJECTID placeholder in each query is replaced with ``self.identifier``.

    :return: dict mapping a human-readable query title (query name, lowercased,
        underscores replaced by spaces) to the resulting cursor data.
    """
    data = {}
    # Hoisted out of the try block so the except handler can always reference it
    # (the original assigned it inside try, risking a NameError while logging).
    queries_path = os.path.join(cwd, self.queries_file)
    try:
        project_cypher = query_utils.read_queries(queries_path)
        driver = connector.getGraphDatabaseConnectionConfiguration()
        replace = [("PROJECTID", self.identifier)]
        for query_name in project_cypher:
            title = query_name.lower().replace('_', ' ')
            query = project_cypher[query_name]['query']
            query_type = project_cypher[query_name]['query_type']
            for r, by in replace:
                query = query.replace(r, by)
            if query_type == "pre":
                data[title] = connector.getCursorData(driver, query)
    except Exception as err:
        exc_type, exc_obj, exc_tb = sys.exc_info()
        fname = os.path.split(exc_tb.tb_frame.f_code.co_filename)[1]
        logger.error("Reading queries from file {}: {}, file: {},line: {}, error: {}".format(queries_path, sys.exc_info(), fname, exc_tb.tb_lineno, err))
    return data
def get_db_stats_data():
    """
    Retrieves all the stats data from the graph database and returns them as a dictionary.

    :return: Dictionary of dataframes serialized to JSON records.
    """
    query_names = ['unique_projects', 'get_db_stats', 'get_db_store_size', 'get_db_transactions', 'get_db_kernel']
    df_names = ['projects', 'meta_stats', 'store_size', 'transactions', 'kernel_monitor']
    dfs = {}
    cypher = get_query()
    driver = connector.getGraphDatabaseConnectionConfiguration()
    if driver is not None:
        for df_name, query_name in zip(df_names, query_names):
            query = cypher[query_name]['query']
            try:
                data = connector.getCursorData(driver, query)
                if df_name == 'store_size':
                    data = data.T
                    # Renamed the comprehension variable: the original reused 'i',
                    # shadowing the outer loop variable.
                    data['size'] = [size_converter(value) for value in data[0]]
                dfs[df_name] = data.to_json(orient='records')
            except Exception as err:
                # Best-effort: a failing stats query must not break the page,
                # but the failure is now logged instead of silently dropped.
                logger.error("Error retrieving database stats '{}': {}".format(query_name, err))
    return dfs
def parseUsersFile(expiration=365):
    """
    Creates new user in the graph database and corresponding node, through the following steps:

        1. Generates new user identifier
        2. Checks if a user with given properties already exists in the database. If not:
        3. Creates new local user (access to graph database)
        4. Saves data to tab-delimited file.

    :param int expiration: number of days a user is given access.
    :return: Writes relevant .tsv file for the users in the provided file.
    """
    usersDir = ckg_config['users_directory']
    usersFile = os.path.join(usersDir, config['usersFile'])
    usersImportDir = ckg_config['imports_users_directory']
    usersImportFile = os.path.join(usersImportDir, config['import_file'])
    driver = connector.getGraphDatabaseConnectionConfiguration(database=None)
    data = pd.read_excel(usersFile).applymap(str)
    date = datetime.today() + timedelta(days=expiration)
    df = []
    try:
        user_identifier = get_new_user_identifier(driver)
        if user_identifier is None:
            user_identifier = 'U1'
        # Raw string fixes the invalid escape sequence ('\d') in the original pattern.
        new_id = int(re.search(r'\d+', user_identifier).group())
        for index, row in data.iterrows():
            username = check_if_node_exists(driver, 'username', row['username'])
            name = check_if_node_exists(driver, 'name', row['name'])
            email = check_if_node_exists(driver, 'email', row['email'])
            if username.empty and name.empty and email.empty:
                row['ID'] = 'U{}'.format(new_id)
                row['acronym'] = ''.join([c for c in row['name'] if c.isupper()])
                row['rolename'] = 'reader'
                row['expiration_date'] = date.strftime('%Y-%m-%d')
                row['image'] = ''
                #create_db_user(driver, row)
                row['password'] = bcrypt.encrypt(row['password'])
                df.append(row)
                new_id += 1
    except Exception as err:
        exc_type, exc_obj, exc_tb = sys.exc_info()
        fname = os.path.split(exc_tb.tb_frame.f_code.co_filename)[1]
        logger.error("Extracting users info: {}, file: {},line: {}".format(sys.exc_info(), fname, exc_tb.tb_lineno))
    if len(df) > 0:
        data = pd.DataFrame(df)
        # Phone numbers read from Excel may come as floats ('123456.0'); keep the integer part.
        data['phone_number'] = data['phone_number'].str.split('.').str[0]
        data = data[['ID', 'acronym', 'name', 'username', 'password', 'email', 'secondary_email', 'phone_number', 'affiliation', 'expiration_date', 'rolename', 'image']]
        GenerateGraphFiles(data, usersImportFile)
def find(self):
    """Look up this user's node in the graph database by username; None if offline or absent."""
    driver = connector.getGraphDatabaseConnectionConfiguration()
    if driver is None:
        return None
    return connector.find_node(driver, node_type="User", parameters={"username": self.username})
def create_new_project(identifier, data, separator='|'):
    """Create a project node from serialized project data.

    :param str identifier: internal project identifier.
    :param data: JSON-serialized dataframe with the project attributes.
    :param str separator: multi-value field separator.
    :return: dict {external project id: creation result}, or {} on failure.
    """
    driver = connector.getGraphDatabaseConnectionConfiguration()
    project_result, projectId = projectCreation.create_new_project(
        driver, identifier, pd.read_json(data), separator=separator)
    if projectId is None:
        return {}
    return {str(projectId): str(project_result)}
def create_user(data, output_file, expiration=365):
    """
    Creates new user in the graph database and corresponding node, through the following steps:

        1. Checks if a user with given properties already exists in the database. If not:
        2. Generates new user identifier
        3. Creates new local user (access to graph database)
        4. Creates new user node
        5. Saves data to users.tsv

    :param data: pandas dataframe with users as rows and arguments and columns.
    :param str output_file: path to output csv file.
    :param int expiration: number of days users is given access.
    :return: Writes relevant .tsv file for the users in data.
    """
    driver = connector.getGraphDatabaseConnectionConfiguration(database=None)
    expiry_date = datetime.today() + timedelta(days=expiration)
    created_rows = []
    try:
        for _, row in data.iterrows():
            if validate_user(driver, row['username'], row['email']):
                print("User already in the database. Check username and email address.")
                continue
            row['ID'] = get_new_user_id(driver)
            row['acronym'] = ''.join(c for c in row['name'] if c.isupper())
            row['rolename'] = 'reader'
            row['expiration_date'] = expiry_date.strftime('%Y-%m-%d')
            row['image'] = ''
            uh.create_db_user(driver, row)
            # Only the hashed password is persisted.
            row['password'] = bcrypt.hash(row['password'])
            create_user_node(driver, row)
            created_rows.append(row)
    except Exception as err:
        exc_type, exc_obj, exc_tb = sys.exc_info()
        fname = os.path.split(exc_tb.tb_frame.f_code.co_filename)[1]
        logger.error("Creating users: file: {},line: {}, error: {}".format(fname, exc_tb.tb_lineno, err))
    if created_rows:
        users_df = pd.DataFrame(created_rows)
        # Excel-sourced phone numbers may come as floats ('123456.0'); keep the integer part.
        users_df['phone_number'] = users_df['phone_number'].str.split('.').str[0]
        users_df = users_df[['ID', 'acronym', 'name', 'username', 'password', 'email', 'secondary_email', 'phone_number', 'affiliation', 'expiration_date', 'rolename', 'image']]
        uh.GenerateGraphFiles(users_df, output_file)
def validate_user(self):
    """Return True when neither this username nor this email is already registered."""
    driver = connector.getGraphDatabaseConnectionConfiguration()
    user = None
    email = None
    if driver is not None:
        user = connector.find_node(driver, node_type="User", parameters={"username": self.username})
        email = connector.find_node(driver, node_type="User", parameters={'email': self.email})
    return user is None and email is None
def get_mapping_analytical_samples(project_id):
    """Map a project's analytical-sample external ids to their internal ids.

    :param str project_id: external project identifier.
    :return: dict {external_id: internal_id}; the (empty) cursor result if no samples.
    """
    from ckg.graphdb_connector import connector
    driver = connector.getGraphDatabaseConnectionConfiguration()
    # NOTE(review): project_id is interpolated straight into the Cypher string; if it
    # can come from user input this should use query parameters instead.
    query = ("MATCH (p:Project)-[:HAS_ENROLLED]-(:Subject)-[:BELONGS_TO_SUBJECT]-()-"
             "[:SPLITTED_INTO]-(a:Analytical_sample) WHERE p.id='{}' "
             "RETURN a.external_id, a.id").format(project_id)
    mapping = connector.getCursorData(driver, query)
    if not mapping.empty:
        mapping = mapping.set_index("a.external_id").to_dict(orient='dict')["a.id"]
    return mapping
def create_new_identifiers(project_id, data, directory, filename):
    """Create internal identifiers for an uploaded experimental design and report sample counts.

    :param str project_id: external project identifier.
    :param data: JSON-serialized experimental-design dataframe.
    :param str directory: folder holding the design file.
    :param str filename: design file name.
    :return: dict {project_id: creation result, 'res_n': per-sample-type counts}.
    """
    driver = connector.getGraphDatabaseConnectionConfiguration()
    # External-id columns are forced to object dtype so numeric-looking ids keep their text form.
    id_dtypes = {'subject external_id': object,
                 'biological_sample external_id': object,
                 'analytical_sample external_id': object}
    upload_result = dataUpload.create_experiment_internal_identifiers(
        driver, project_id, pd.read_json(data, dtype=id_dtypes), directory, filename)
    res_n = dataUpload.check_samples_in_project(driver, project_id)
    return {str(project_id): str(upload_result), 'res_n': res_n.to_dict()}
def getMappingFromDatabase(id_list, node, attribute_from='id', attribute_to='name'):
    """Translate one node attribute to another for a list of values via the graph database.

    NOTE(review): when the query yields no rows, the (empty) cursor result is returned
    unchanged rather than an empty dict — confirm callers expect this.

    :param list id_list: attribute values to look up.
    :param str node: node label to match.
    :param str attribute_from: attribute the ids refer to.
    :param str attribute_to: attribute to map onto.
    :return: dict {from_value: to_value} when matches exist.
    """
    driver = connector.getGraphDatabaseConnectionConfiguration()
    quoted = ','.join("'{}'".format(value) for value in id_list)
    mapping_query = "MATCH (n:{}) WHERE n.{} IN [{}] RETURN n.{} AS from, n.{} AS to"
    mapping = connector.getCursorData(
        driver, mapping_query.format(node, attribute_from, quoted, attribute_from, attribute_to))
    if not mapping.empty:
        mapping = dict(zip(mapping['from'], mapping['to']))
    return mapping
def fullUpdate():
    """
    Main method that controls the population of the graph database. Firstly, it gets a connection \
    to the database (driver) and then initiates the update of the entire database getting \
    all the graph entities to update from configuration. Once the graph database has been \
    populated, the imports folder in data/ is compressed and archived in the archive/ folder \
    so that a backup of the imports files is kept (full).
    """
    imports = config["graph"]
    driver = connector.getGraphDatabaseConnectionConfiguration()
    entities = ",".join(imports)
    logger.info("Full update of the database - Updating: {}".format(entities))
    updateDB(driver, imports)
    logger.info("Full update of the database - Update took: {}".format(datetime.now() - START_TIME))
    logger.info("Full update of the database - Archiving imports folder")
    # Keep a backup of the import files used in this full update.
    archiveImportDirectory(archive_type="full")
    logger.info("Full update of the database - Archiving took: {}".format(datetime.now() - START_TIME))
def get_sdrf(self):
    """Export this project's metadata in SDRF format.

    :return: pandas DataFrame in SDRF layout; empty DataFrame on error.
    """
    sdrf_df = pd.DataFrame()
    # Resolved before the try block: the original assigned it after opening the driver
    # inside try, so a connection failure raised NameError inside the except logger.
    query_path = os.path.join(cwd, self.queries_file)
    try:
        driver = connector.getGraphDatabaseConnectionConfiguration()
        project_cypher = query_utils.read_queries(query_path)
        query = query_utils.get_query(project_cypher, query_id="project_sdrf")
        df = connector.getCursorData(driver, query.replace("PROJECTID", self.identifier))
        sdrf_df = builder_utils.convert_ckg_to_sdrf(df)
    except Exception as err:
        exc_type, exc_obj, exc_tb = sys.exc_info()
        fname = os.path.split(exc_tb.tb_frame.f_code.co_filename)[1]
        logger.error("Error: {}. Reading queries from file {}: {}, file: {},line: {}".format(err, query_path, sys.exc_info(), fname, exc_tb.tb_lineno))
    return sdrf_df
def register(self):
    """Register this user in the database.

    :return: status string for the UI — 'ok', 'error_msg' (DB offline),
        'error_exists', 'error_email' or 'error_database'.
    """
    driver = connector.getGraphDatabaseConnectionConfiguration()
    if driver is None:
        return 'error_msg'
    if self.find() is not None:
        return "error_exists"
    if not self.validate_user():
        return 'error_email'
    outcome = create_user.create_user_from_dict(driver, self.to_dict())
    return 'ok' if outcome is not None else 'error_database'
def partialUpdate(imports, specific=None):
    """
    Method that controls the update of the graph database with the specified entities and \
    relationships. Firstly, it gets a connection to the database (driver) and then initiates \
    the update of the specified graph entities. \
    Once the graph database has been populated, the data files uploaded to the graph are compressed \
    and archived in the archive/ folder (partial).

    :param list imports: list of entities to update
    :param list specific: optional list restricting the update to specific items (defaults to all).
    """
    # Mutable default argument ([]) replaced with a None sentinel; behavior is
    # unchanged for existing callers.
    if specific is None:
        specific = []
    driver = connector.getGraphDatabaseConnectionConfiguration()
    logger.info("Partial update of the database - Updating: {}".format(",".join(imports)))
    updateDB(driver, imports, specific)
    logger.info("Partial update of the database - Update took: {}".format(datetime.now() - START_TIME))
    logger.info("Partial update of the database - Archiving imports folder")
    #archiveImportDirectory(archive_type="partial")
    logger.info("Partial update of the database - Archiving {} took: {}".format(",".join(imports), datetime.now() - START_TIME))
def convert_ckg_clinical_to_sdrf(df):
    """Rename CKG clinical-data columns to their SDRF equivalents.

    Fixed columns are mapped directly; clinical-variable columns ("name (id)",
    detected by containing a digit) are mapped via Experimental_factor nodes.
    """
    out_mapping = {'tissue': 'characteristics[organism part]',
                   'disease': 'characteristics[disease]',
                   'grouping1': 'characteristics[phenotype]',
                   'analytical_sample': 'comment[data file]',
                   'subject': 'characteristics[individual]',
                   'biological_sample': 'source name'}
    cols = [c for c in df.columns if re.search(r'(\d+)', c)]
    driver = connector.getGraphDatabaseConnectionConfiguration()
    query = '''MATCH (ef:Experimental_factor)-[r:MAPS_TO]-(c:Clinical_variable) WHERE c.name+' ('+c.id+')' IN {} RETURN c.name+' ('+c.id+')' AS from, "characteristic["+ef.name+"]" AS to, LABELS(c)'''
    mapping = connector.getCursorData(driver, query.format(cols))
    mapping = dict(zip(mapping['from'], mapping['to']))
    mapping.update(out_mapping)
    return df.rename(mapping, axis=1)
def convert_sdrf_to_ckg(df):
    """Rename SDRF columns back to CKG clinical-data columns.

    The bracketed attribute name is extracted from each SDRF column
    (e.g. "characteristics[disease]" -> "disease") and mapped either via the
    fixed table below or via Experimental_factor nodes in the database.
    """
    in_mapping = {'organism part': 'tissue',
                  'disease': 'disease',
                  'phenotype': 'grouping1',
                  'data file': 'analytical_sample external_id',
                  'individual': 'subject external_id',
                  'source name': 'biological_sample external_id'}
    cols = {}
    for column in df.columns:
        found = re.search(r'\[(.+)\]', column)
        if found:
            cols[column] = found.group(1)
    driver = connector.getGraphDatabaseConnectionConfiguration()
    query = '''MATCH (ef:Experimental_factor)-[r:MAPS_TO]-(c:Clinical_variable) WHERE ef.name IN {} RETURN ef.name AS from, c.name+' ('+c.id+')' AS to, LABELS(c)'''
    mapping = connector.getCursorData(driver, query.format(list(cols.values())))
    mapping = dict(zip(mapping['from'], mapping['to']))
    mapping.update(in_mapping)
    return df.rename(cols, axis=1).rename(mapping, axis=1)
def get_similarity_network(self):
    """Build a Cytoscape plot of the subgraph connecting this project to similar ones.

    Reads ``self.similar_projects`` (presumably a dataframe-like with an
    "other_id" column produced by an earlier similarity step — TODO confirm).

    :return: Dash Cytoscape component, or None if anything fails.
    """
    plot = None
    try:
        query_path = os.path.join(cwd, self.queries_file)
        project_cypher = query_utils.read_queries(query_path)
        query = query_utils.get_query(project_cypher, query_id="projects_subgraph")
        list_projects = []
        driver = connector.getGraphDatabaseConnectionConfiguration()
        if self.similar_projects is not None:
            if "other_id" in self.similar_projects:
                list_projects = self.similar_projects["other_id"].values.tolist()
            # The current project is always part of the subgraph.
            list_projects.append(self.identifier)
            # Quote each id and inline the list into the Cypher query template.
            list_projects = ",".join(['"{}"'.format(i) for i in list_projects])
            query = query.replace("LIST_PROJECTS", list_projects)
            path = connector.sendQuery(driver, query, parameters={})
            G = acore_utils.neo4j_path_to_networkx(path, key='path')
            args = {}
            style, layout = self.get_similarity_network_style()
            args['stylesheet'] = style
            args['layout'] = layout
            args['title'] = "Projects subgraph"
            net, mouseover = acore_utils.networkx_to_cytoscape(G)
            plot = viz.get_cytoscape_network(net, "projects_subgraph", args)
    except Exception as err:
        exc_type, exc_obj, exc_tb = sys.exc_info()
        fname = os.path.split(exc_tb.tb_frame.f_code.co_filename)[1]
        logger.error("Error: {}. Reading queries from file {}: {}, file: {},line: {}".format(err, query_path, sys.exc_info(), fname, exc_tb.tb_lineno))
    return plot
def create_project(n_clicks, name, acronym, responsible, participant, datatype, timepoints, related_to, disease, tissue, intervention, description, start_date, end_date):
    """Dash callback: validate the project-creation form and launch the async creation task.

    :return: 4-tuple (response message, external-id text, template-link style, block style)
        consumed by the project-creation app outputs.
    """
    if n_clicks > 0:
        session_cookie = flask.request.cookies.get('custom-auth-session')
        # Multi-value form fields are serialized with the module-level separator.
        responsible = separator.join(responsible)
        participant = separator.join(participant)
        datatype = separator.join(datatype)
        disease = separator.join(disease)
        tissue = separator.join(tissue)
        arguments = [name, datatype, disease, tissue, responsible]
        driver = connector.getGraphDatabaseConnectionConfiguration()
        if driver is not None:
            # Check if clinical variables exist in the database
            if intervention is not None:
                intervention = intervention.strip()
                if intervention != '':
                    interventions = list()
                    missing = dict()  # intervention id -> True when not found in the database
                    for i in intervention.split(separator):
                        res = projectCreation.check_if_node_exists(driver, 'Clinical_variable', 'id', i)
                        if res.empty:
                            missing[i] = True
                        else:
                            missing[i] = False
                            interventions.append('{} ({})'.format(res['n.name'][0], i))
                    intervention = separator.join(interventions)
                    if any(missing.values()):
                        response = 'The intervention(s) "{}" specified does(do) not exist.'.format(', '.join([k for k, n in missing.items() if n]))
                        return response, None, {'display': 'none'}, {'display': 'none'}
            if any(not arguments[n] for n, i in enumerate(arguments)):
                response = "Insufficient information to create project. Fill in all fields with '*'."
                return response, None, {'display': 'none'}, {'display': 'none'}
            # Get project data from filled-in fields
            projectData = pd.DataFrame([name, acronym, description, related_to, datatype, timepoints, disease, tissue, intervention, responsible, participant, start_date, end_date]).T
            projectData.columns = ['name', 'acronym', 'description', 'related_to', 'datatypes', 'timepoints', 'disease', 'tissue', 'intervention', 'responsible', 'participant', 'start_date', 'end_date']
            projectData['status'] = ''
            # pd.np was removed in pandas>=2.0; use numpy directly (np is already used below).
            projectData.fillna(value=np.nan, inplace=True)
            projectData.replace('', np.nan, inplace=True)
            # Generate project internal identifier based on timestamp
            # Excel file is saved in folder with internal id name
            epoch = time.time()
            internal_id = "%s%d" % ("CP", epoch)
            projectData.insert(loc=0, column='internal_id', value=internal_id)
            # Creation runs as a Celery task on the 'creation' queue.
            result = create_new_project.apply_async(args=[internal_id, projectData.to_json(), separator],
                                                    task_id='project_creation_' + session_cookie + internal_id,
                                                    queue='creation')
            result_output = result.get()
            if len(result_output) > 0:
                external_id = list(result_output.keys())[0]
                done_msg = result_output[external_id]
                if external_id != '' and done_msg is not None:
                    response = "Project successfully submitted. Download Clinical Data template."
                elif done_msg is None:
                    response = "There was a problem when creating the project. Please, contact the administrator."
                else:
                    response = 'A project with the same name already exists in the database.'
            else:
                response = "There was a problem when creating the project. Please, try again or contact the administrator."
                external_id = response
        else:
            # Fixed typo in the user-facing message ('datatabase' -> 'database') and
            # bound external_id, which was previously unbound on this path (NameError).
            response = "The Database is temporarily offline. Contact your administrator or start the database."
            external_id = ''
        return response, '- ' + external_id, {'display': 'inline-block'}, {'display': 'block'}
    else:
        return None, None, {'display': 'none'}, {'display': 'none'}
def send_query(self, query):
    """Run an arbitrary Cypher query against the graph database and return the cursor data."""
    driver = connector.getGraphDatabaseConnectionConfiguration()
    return connector.getCursorData(driver, query)
def run_processing(n_clicks, project_id):
    """Dash callback: process files previously uploaded to the session's temporary
    folder and load them into the given project.

    Handles the 'experimental_design' dataset first (replacing any design already in
    the database), then validates 'clinical' data against the stored identifiers, and
    finally imports all remaining datasets and archives the upload.

    :return: tuple (message, style, style, table) feeding the data-upload app outputs.
    """
    message = None
    style = {'display': 'none'}
    table = None
    if n_clicks > 0:
        session_cookie = flask.request.cookies.get('custom-auth-session')
        destDir = os.path.join(ckg_config['experiments_directory'], project_id)
        builder_utils.checkDirectory(destDir)
        # Uploaded files live in a per-session temporary folder, one subfolder per dataset.
        temporaryDirectory = os.path.join(ckg_config['tmp_directory'], session_cookie + "upload")
        datasets = builder_utils.listDirectoryFoldersNotEmpty(temporaryDirectory)
        driver = connector.getGraphDatabaseConnectionConfiguration()
        if driver is not None:
            res_n = dataUpload.check_samples_in_project(driver, project_id)
            if 'experimental_design' in datasets:
                dataset = 'experimental_design'
                directory = os.path.join(temporaryDirectory, dataset)
                destination = os.path.join(destDir, dataset)
                experimental_files = os.listdir(directory)
                # Expected file name pattern from config, with PROJECTID substituted.
                regex = r"{}.+".format(config['file_design'].replace('PROJECTID', project_id))
                r = re.compile(regex)
                experimental_filename = list(filter(r.match, experimental_files))
                if len(experimental_filename) > 0:
                    experimental_filename = experimental_filename.pop()
                    designData = builder_utils.readDataset(os.path.join(directory, experimental_filename))
                    designData = designData.astype(str)
                    designData.columns = [c.lower() for c in designData.columns]
                    if 'subject external_id' in designData.columns and 'biological_sample external_id' in designData.columns and 'analytical_sample external_id' in designData.columns:
                        # An experimental design already loaded for this project is removed
                        # before the new one is created.
                        if (res_n > 0).any().values.sum() > 0:
                            res = dataUpload.remove_samples_nodes_db(driver, project_id)
                            res_n = dataUpload.check_samples_in_project(driver, project_id)
                            if (res_n > 0).any().values.sum() > 0:
                                message = 'ERROR: There is already an experimental design loaded into the database and there was an error when trying to delete it. Contact your administrator.'
                                return message, style, style, table
                        res_n = None
                        # Identifier creation runs as a Celery task on the 'creation' queue.
                        result = create_new_identifiers.apply_async(args=[project_id, designData.to_json(), directory, experimental_filename], task_id='data_upload_' + session_cookie + datetime.now().strftime('%Y%m-%d%H-%M%S-'), queue='creation')
                        result_output = result.wait(timeout=None, propagate=True, interval=0.2)
                        res_n = pd.DataFrame.from_dict(result_output['res_n'])
                        builder_utils.copytree(directory, destination)
                    else:
                        message = 'ERROR: The Experimental design file provided ({}) is missing some of the required fields: {}'.format(experimental_filename, ','.join(['subject external_id', 'biological_sample external_id', 'analytical_sample external_id']))
                        builder_utils.remove_directory(directory)
                        return message, style, style, table
            if 'clinical' in datasets:
                dataset = 'clinical'
                directory = os.path.join(temporaryDirectory, dataset)
                clinical_files = os.listdir(directory)
                regex = r"{}.+".format(config['file_clinical'].replace('PROJECTID', project_id))
                r = re.compile(regex)
                clinical_filename = list(filter(r.match, clinical_files))
                if len(clinical_filename) > 0:
                    clinical_filename = clinical_filename.pop()
                    data = builder_utils.readDataset(os.path.join(directory, clinical_filename))
                    data.columns = [c.lower() for c in data.columns]
                    external_ids = {}
                    if 'subject external_id' in data and 'biological_sample external_id' in data:
                        external_ids['subjects'] = data['subject external_id'].astype(str).unique().tolist()
                        external_ids['biological_samples'] = data['biological_sample external_id'].astype(str).unique().tolist()
                        dataUpload.create_mapping_cols_clinical(driver, data, directory, clinical_filename, separator=separator)
                        if 0 in res_n.values:
                            # NOTE(review): iterating a DataFrame yields column labels, so the
                            # (k, v) unpacking below looks wrong unless res_n is dict-like at
                            # this point — confirm the type returned by check_samples_in_project.
                            samples = ', '.join([k for (k, v) in res_n if v == 0])
                            message = 'ERROR: No {} for project {} in the database. Please upload first the experimental design (ExperimentalDesign_{}.xlsx)'.format(samples, project_id, project_id)
                            builder_utils.remove_directory(directory)
                            return message, style, style, table
                        else:
                            db_ids = dataUpload.check_external_ids_in_db(driver, project_id).to_dict()
                            message = ''
                            intersections = {}
                            differences_in = {}
                            differences_out = {}
                            # Compare ids in the uploaded file against ids stored in the database.
                            for col in external_ids:
                                intersect = list(set(db_ids[col].values()).intersection(external_ids[col]))
                                difference_in = list(set(db_ids[col].values()).difference(external_ids[col]))
                                difference_out = list(set(external_ids[col]).difference(set(db_ids[col].values())))
                                if len(difference_in) > 0 or len(difference_out) > 0:
                                    intersections[col] = intersect
                                    differences_in[col] = difference_in
                                    differences_out[col] = difference_out
                            for col in intersections:
                                message += 'WARNING: Some {} identifiers were not matched:\n Matching: {}\n No information provided: {} \n Non-existing in the database: {}\n'.format(col, len(intersections[col]), ','.join(differences_in[col]), ','.join(differences_out[col]))
                    else:
                        message = 'ERROR: Format of the Clinical Data file is not correct. Check template in the documentation. Check columns: subject external_id, biological_sample external_id and analytical_sample external_id'
                        builder_utils.remove_directory(directory)
                        return message, style, style, table
            try:
                # Import every remaining dataset (the design was handled above) and
                # trigger a partial database update for this project.
                for dataset in datasets:
                    if dataset != "experimental_design":
                        source = os.path.join(temporaryDirectory, dataset)
                        destination = os.path.join(destDir, dataset)
                        builder_utils.copytree(source, destination)
                        datasetPath = os.path.join(os.path.join(ckg_config['imports_experiments_directory'], project_id), dataset)
                        eh.generate_dataset_imports(project_id, dataset, datasetPath)
                loader.partialUpdate(imports=['experiment'], specific=[project_id])
                # Archive the uploaded files as a zip in the tmp directory.
                filename = os.path.join(ckg_config['tmp_directory'], 'Uploaded_files_' + project_id)
                utils.compress_directory(filename, temporaryDirectory, compression_format='zip')
                style.update({'display': 'inline-block'})
                message = 'Files successfully uploaded.'
                table = dataUpload.get_project_information(driver, project_id)
                if table is None:
                    message = 'Error: No data was uploaded for project: {}. Review your experimental design and data files.'.format(project_id)
            except Exception as err:
                style.update({'display': 'none'})
                message = str(err)
        else:
            style.update({'display': 'none'})
            message = "ERROR: Database is offline. Contact your administrator or start the database."
    return message, style, style, table
def get_db_schema():
    """
    Retrieves the database schema

    :return: network with all the database nodes and how they are related
    """
    # Cytoscape styling for schema nodes and edges.
    style = [{'selector': 'node', 'style': {'label': 'data(name)', 'background-color': 'data(color)', 'text-valign': 'center', 'text-halign': 'center', 'border-color': 'gray', 'border-width': '1px', 'width': 55, 'height': 55, 'opacity': 0.8, 'font-size': '14'}},
             {'selector': 'edge', 'style': {'label': 'data(label)', 'curve-style': 'bezier', 'opacity': 0.7, 'width': 0.4, 'font-size': '5'}}]
    # Force-directed ('cose') layout parameters.
    layout = {'name': 'cose', 'idealEdgeLength': 100, 'nodeOverlap': 20, 'refresh': 20, 'randomize': False, 'componentSpacing': 100, 'nodeRepulsion': 400000, 'edgeElasticity': 100, 'nestingFactor': 5, 'gravity': 80, 'numIter': 1000, 'initialTemp': 200, 'coolingFactor': 0.95, 'minTemp': 1.0}
    query_name = 'db_schema'
    cypher = get_query()
    driver = connector.getGraphDatabaseConnectionConfiguration()
    if driver is not None:
        if query_name in cypher:
            if 'query' in cypher[query_name]:
                query = cypher[query_name]['query']
                try:
                    path = connector.sendQuery(driver, query, parameters={})
                    G = utils.neo4j_schema_to_networkx(path)
                    args = {'height': '1000px'}
                    args['stylesheet'] = style
                    args['layout'] = layout
                    args['title'] = "Database Schema"
                    net, mouseover = utils.networkx_to_cytoscape(G)
                    plot = viz.get_cytoscape_network(net, "db_schema", args)
                except Exception as err:
                    plot = html.Div(children=html.H1("Error accessing the database statistics", className='error_msg'))
            # NOTE(review): if query_name exists but its entry has no 'query' key,
            # 'plot' is never assigned and the return below raises NameError — confirm.
        else:
            plot = html.Div(children=html.H1("Error: Cypher query {} for accessing the database statistics does not exist".format(query_name), className='error_msg'))
    else:
        plot = html.Div(children=html.H1("Database is offline", className='error_msg'))
    return plot
def quick_numbers_panel():
    """
    Creates a panel of Dash containers where an overview of the graph database numbers can be plotted.

    :return: List of Dash components.
    """
    project_ids = []
    project_links = [html.H4('No available Projects')]
    try:
        driver = connector.getGraphDatabaseConnectionConfiguration()
        if driver is not None:
            projects = connector.find_nodes(driver, node_type='Project', parameters={})
            for project in projects:
                project_ids.append((project['n']['name'], project['n']['id']))
            project_links = [html.H4('Available Projects:')]
    except Exception:
        # Best-effort: the panel falls back to 'No available Projects' when the DB is down.
        pass
    for project_name, project_id in project_ids:
        project_links.append(html.A(project_name.title(),
                                    id='link-internal',
                                    href='/apps/project?project_id={}&force=0'.format(project_id),
                                    target='',
                                    n_clicks=0,
                                    className="button_link"))
    project_dropdown = [html.H6('Project finder:'),
                        dcc.Dropdown(id='project_option',
                                     options=[{'label': name, 'value': (name, value)} for name, value in project_ids],
                                     value='',
                                     multi=False,
                                     clearable=True,
                                     placeholder='Search...',
                                     style={'width': '50%'}),
                        html.H4('', id='project_url')]
    navigation_links = [html.H4('Navigate to:'),
                        html.A("Database Imports", href="/apps/imports", className="nav_link"),
                        html.A("Project Creation", href="/apps/projectCreationApp", className="nav_link"),
                        html.A("Data Upload", href="/apps/dataUploadApp", className="nav_link"),
                        html.A("Admin", href="/apps/admin", className="nav_link")]
    # Only the first few project links are shown directly; the dropdown covers the rest.
    layout = [html.Div(children=navigation_links),
              html.Div(children=project_links[0:5]),
              html.Div(children=project_dropdown),
              html.Div(children=get_db_schema()),
              dcc.Store(id='db_stats_df', data=get_db_stats_data()),
              html.Div(id='db-creation-date'),
              html.Br(),
              html.H3('Overview'),
              # Three rows of indicator cards fed by the db_stats_df store via callbacks.
              html.Div(children=[indicator("#EF553B", "No. of Entities", "db_indicator_1"),
                                 indicator("#EF553B", "No. of Labels", "db_indicator_2"),
                                 indicator("#EF553B", "No. of Relationships", "db_indicator_3"),
                                 indicator("#EF553B", "No. of Relationship Types", "db_indicator_4"),
                                 indicator("#EF553B", "No. of Property Keys", "db_indicator_5")]),
              html.Div(children=[indicator("#EF553B", "Entities store", "db_indicator_6"),
                                 indicator("#EF553B", "Relationships store", "db_indicator_7"),
                                 indicator("#EF553B", "Property store", "db_indicator_8"),
                                 indicator("#EF553B", "String store", "db_indicator_9"),
                                 indicator("#EF553B", "Array store", "db_indicator_10")]),
              html.Div(children=[indicator("#EF553B", "Logical Log size", "db_indicator_11"),
                                 indicator("#EF553B", "No. of Transactions (opened)", "db_indicator_12"),
                                 indicator("#EF553B", "No. of Transactions (committed)", "db_indicator_13"),
                                 indicator("#EF553B", "No. of Projects", "db_indicator_14")]),
              html.Br(),
              html.Br()]
    return layout
def buildPage(self):
    """
    Builds page with the basic layout from *basicApp.py* and adds relevant Dash components for project creation.
    """
    self.add_basic_layout()
    driver = connector.getGraphDatabaseConnectionConfiguration()
    if driver is not None:
        try:
            users = []
            tissues = []
            diseases = []
            # Dropdown options are populated from the nodes currently in the database.
            user_nodes = connector.find_nodes(driver, node_type='User')
            tissue_nodes = connector.find_nodes(driver, node_type='Tissue')
            disease_nodes = connector.find_nodes(driver, node_type='Disease')
            for user in user_nodes:
                users.append((user['n']['name']))
            for tissue in tissue_nodes:
                tissues.append((tissue['n']['name']))
            for disease in disease_nodes:
                diseases.append((disease['n']['name']))
            # Project-creation form; fields marked '*' are mandatory for submission.
            layout = [html.Div([
                html.Div([html.H4('Project information', style={'width': '15.5%', 'verticalAlign': 'top', 'display': 'inline-block'}),
                          html.H4('', id='update_project_id', style={'width': '15%', 'verticalAlign': 'top', 'display': 'none'}),
                          html.Br(),
                          html.Div(children=[html.Label('Project name:*', style={'marginTop': 15}),
                                             dcc.Input(id='project name', placeholder='Insert name...', type='text', style={'width': '100%', 'height': '35px'})],
                                   style={'width': '100%'}),
                          html.Br(),
                          html.Div(children=[html.Label('Project Acronym:', style={'marginTop': 15}),
                                             dcc.Input(id='project acronym', placeholder='Insert name...', type='text', style={'width': '100%', 'height': '35px'})],
                                   style={'width': '100%'}),
                          html.Br(),
                          html.Div(children=[html.Label('Project Responsible:*', style={'marginTop': 15})],
                                   style={'width': '49%', 'verticalAlign': 'top', 'display': 'inline-block'}),
                          html.Div(children=[html.Label('Project Participants:*', style={'marginTop': 15})],
                                   style={'width': '49%', 'marginLeft': '2%', 'verticalAlign': 'top', 'display': 'inline-block'}),
                          html.Div(children=[dcc.Dropdown(id='responsible-picker', options=[{'label': i, 'value': i} for i in users], value=[], multi=True, searchable=True, style={'width': '100%'})],
                                   style={'width': '49%', 'verticalAlign': 'top', 'display': 'inline-block'}),
                          html.Div(children=[dcc.Dropdown(id='participant-picker', options=[{'label': i, 'value': i} for i in users], value=[], multi=True, searchable=True, style={'width': '100%'})],
                                   style={'width': '49%', 'marginLeft': '2%', 'verticalAlign': 'top', 'display': 'inline-block'}),
                          html.Br(),
                          html.Br(),
                          html.Div(children=[html.Label('Project Data Types:*', style={'marginTop': 10})],
                                   style={'width': '49%', 'marginLeft': '0%', 'verticalAlign': 'top', 'display': 'inline-block'}),
                          html.Div(children=[html.Label('Project Disease:*', style={'marginTop': 10})],
                                   style={'width': '49%', 'marginLeft': '2%', 'verticalAlign': 'top', 'display': 'inline-block'}),
                          html.Div(children=[dcc.Dropdown(id='data-types-picker', options=[{'label': i, 'value': i} for i in DataTypes], value=[], multi=True, searchable=True, style={'width': '100%'})],
                                   style={'width': '49%', 'marginLeft': '0%', 'verticalAlign': 'top', 'display': 'inline-block'}),
                          html.Div(children=[dcc.Dropdown(id='disease-picker', options=[{'label': i, 'value': i} for i in diseases], value=[], multi=True, searchable=True, style={'width': '100%'})],
                                   style={'width': '49%', 'marginLeft': '2%', 'verticalAlign': 'top', 'display': 'inline-block'}),
                          html.Br(),
                          html.Br(),
                          html.Div(children=[html.Label('Project Tissue:*', style={'marginTop': 10})],
                                   style={'width': '49%', 'marginLeft': '0%', 'verticalAlign': 'top', 'display': 'inline-block'}),
                          html.Div(children=[html.Label('Project Intervention:', style={'marginTop': 10})],
                                   style={'width': '49%', 'marginLeft': '2%', 'verticalAlign': 'top', 'display': 'inline-block'}),
                          html.Div(children=[dcc.Dropdown(id='tissue-picker', options=[{'label': i, 'value': i} for i in tissues], value=[], multi=True, searchable=True, style={'width': '100%'})],
                                   style={'width': '49%', 'marginLeft': '0%', 'verticalAlign': 'top', 'display': 'inline-block'}),
                          html.Div(children=[dcc.Input(id='intervention-picker', placeholder='E.g. SNOMED identifier|SNOMED identifier|...', type='text', style={'width': '100%', 'height': '54px'})],
                                   style={'width': '49%', 'marginLeft': '2%', 'verticalAlign': 'top', 'display': 'inline-block'}),
                          html.Br(),
                          html.Br(),
                          html.Div(children=[html.Label('Timepoints:', style={'marginTop': 15}),
                                             dcc.Input(id='number_timepoints', placeholder='E.g. 2 months|15 days|24 hours...', type='text', style={'width': '100%', 'height': '35px'})],
                                   style={'width': '49%', 'marginLeft': '0%', 'verticalAlign': 'top', 'display': 'inline-block'}),
                          html.Br(),
                          html.Br(),
                          html.Div(children=[html.Label('Follows up project:', style={'marginTop': 15}),
                                             dcc.Input(id='related_to', placeholder='Use the Project Identifier (P000000X)', type='text', style={'width': '100%', 'height': '35px'})],
                                   style={'width': '49%', 'marginLeft': '0%', 'verticalAlign': 'top', 'display': 'inline-block'}),
                          html.Br(),
                          html.Br(),
                          html.Div(children=[html.Label('Project Description:', style={'marginTop': 15}),
                                             dcc.Textarea(id='project description', placeholder='Enter description...', style={'width': '100%', 'height': '100px'})]),
                          html.Br(),
                          html.Div(children=[html.Label('Starting Date:', style={'marginTop': 10}),
                                             dcc.DatePickerSingle(id='date-picker-start', placeholder='Select date...', clearable=True)],
                                   style={'width': '30%', 'verticalAlign': 'top', 'marginTop': 10, 'display': 'inline-block'}),
                          html.Div(children=[html.Label('Ending Date:', style={'marginTop': 10}),
                                             dcc.DatePickerSingle(id='date-picker-end', placeholder='Select date...', clearable=True)],
                                   style={'width': '30%', 'verticalAlign': 'top', 'marginTop': 10, 'display': 'inline-block'}),
                          html.Div(children=html.Button('Create Project', id='project_button', n_clicks=0, className="button_link"),
                                   style={'width': '100%', 'padding-left': '87%', 'padding-right': '0%'}),
                          html.Br(),
                          # The template link is filled in by a callback after the project is created.
                          html.Div(children=[html.A(children=html.Button('Download Clinical Data template', id='download_button', n_clicks=0, style={'fontSize': '16px', 'display': 'block'}),
                                                    id='download_link', href='', n_clicks=0)],
                                   style={'width': '100%', 'padding-left': '87%', 'padding-right': '0%'}),
                          html.Br(),
                          html.Div(children=[html.H1(id='project-creation')]),
                          html.Br()]),
                html.Hr()])]
        except Exception as e:
            layout = [html.Div(children=html.H1("Database is offline", className='error_msg'))]
    self.extend_layout(layout)