def upload_cbn_dir(dir_path, manager):
    """Upload CBN data to the edge store.

    Converts every ``*.jgf`` (CBN JGIF) file in the directory to BEL, writes
    a ``*.bel`` copy next to the source file, then enriches and stores each
    graph in the database.

    :param str dir_path: Directory full of CBN JGIF files
    :param pybel.Manager manager: A PyBEL database manager
    """
    t = time.time()

    for jgif_name in os.listdir(dir_path):
        if not jgif_name.endswith('.jgf'):
            continue

        path = os.path.join(dir_path, jgif_name)
        log.info('opening %s', path)

        with open(path) as f:
            cbn_jgif_dict = json.load(f)

        graph = pybel.from_cbn_jgif(cbn_jgif_dict)

        # Strip only the trailing extension; str.replace('.jgf', '.bel')
        # would also rewrite an '.jgf' substring occurring earlier in the
        # file name.
        out_path = os.path.join(dir_path, jgif_name[:-len('.jgf')] + '.bel')
        with open(out_path, 'w') as o:
            pybel.to_bel(graph, o)

        strip_annotations(graph)
        enrich_pubmed_citations(manager=manager, graph=graph)
        pybel.to_database(graph, manager=manager)

    log.info('done in %.2f', time.time() - t)
def upload_recursive(directory, connection=None, exclude_directory_pattern=None):
    """Recursively upload all gpickles found under a directory tree.

    :param str directory: the directory to traverse
    :param connection: A connection string or manager
    :type connection: Optional[str or pybel.manage.Manager]
    :param Optional[str] exclude_directory_pattern: Any directory names to exclude
    """
    manager = Manager.ensure(connection)

    gpickle_paths = list(get_paths_recursive(
        directory,
        extension='.gpickle',
        exclude_directory_pattern=exclude_directory_pattern,
    ))
    log.info('Paths to upload: %s', gpickle_paths)

    for gpickle_path in gpickle_paths:
        try:
            network = from_pickle(gpickle_path)
        except (ImportError, ImportVersionWarning):
            # Pickles written by an incompatible PyBEL version can't be loaded;
            # skip them and keep going.
            log.warning(
                '%s uses a pickle from an old version of PyBEL. Quitting.',
                gpickle_path)
            continue

        to_database(network, connection=manager, store_parts=True)
def upload(path, connection, recursive, skip_check_version, to_service, service_url, debug):
    """Quick uploader"""
    set_debug_param(debug)

    # Recursive mode walks the directory tree and returns early.
    if recursive:
        log.info('uploading recursively from: %s', path)
        upload_recursive(path, connection=connection)
        return

    graph = from_pickle(path, check_version=(not skip_check_version))

    if to_service:
        receiver_service.post(graph, service_url)
    else:
        to_database(graph, connection=connection)
def upload(manager, path, skip_check_version, to_service, service_url, exclude_directory_pattern, debug):
    """Upload gpickles"""
    set_debug_param(debug)

    # A directory triggers a recursive upload; handle it and return early.
    if os.path.isdir(path):
        log.info('uploading recursively from: %s', path)
        upload_recursive(
            path,
            connection=manager,
            exclude_directory_pattern=exclude_directory_pattern,
        )
        return

    # Anything that is neither a directory nor a regular file is ignored.
    if not os.path.isfile(path):
        return

    from pybel import from_pickle

    graph = from_pickle(path, check_version=(not skip_check_version))

    if to_service:
        from pybel import to_web
        to_web(graph, service_url)
        return

    from pybel import to_database
    to_database(graph, connection=manager, store_parts=True)
def to_database(self, manager=None):
    """Send the model to the PyBEL database

    This function wraps :py:func:`pybel.to_database`.

    Parameters
    ----------
    manager : Optional[pybel.manager.Manager]
        A PyBEL database manager. If none, first checks the PyBEL
        configuration for ``PYBEL_CONNECTION`` then checks the environment
        variable ``PYBEL_REMOTE_HOST``. Finally, defaults to using SQLite
        database in PyBEL data directory (automatically configured by
        PyBEL)

    Returns
    -------
    network : Optional[pybel.manager.models.Network]
        The SQLAlchemy model representing the network that was uploaded.
        Returns None if upload fails.
    """
    # Delegate straight to pybel; the returned SQLAlchemy model (or None)
    # is passed through unchanged.
    return pybel.to_database(self.model, manager=manager)
def test_upload_with_tloc(self, mock_get):
    """Test that the RAS translocation example graph can be uploaded."""
    graph = ras_tloc_graph
    # The graph's namespaces must be registered before it can be stored.
    make_dummy_namespaces(self.manager, graph)
    to_database(graph, manager=self.manager)
def test_reload(self, mock_get):
    """Test that a graph with the same name and version can't be added twice."""
    graph = sialic_acid_graph.copy()
    self.assertEqual('1.0.0', graph.version)

    to_database(graph, manager=self.manager)
    time.sleep(1)

    self.assertEqual(1, self.manager.count_networks())

    networks = self.manager.list_networks()
    self.assertEqual(1, len(networks))

    network = networks[0]
    self.assertEqual(graph.name, network.name)
    self.assertEqual(graph.version, network.version)
    self.assertEqual(graph.description, network.description)

    reconstituted = self.manager.get_graph_by_name_version(
        graph.name, graph.version)
    self.assertIsInstance(reconstituted, BELGraph)
    self.assertEqual(graph.nodes(data=True), reconstituted.nodes(data=True))
    # self.bel_thorough_reconstituted(reconstituted)

    self.assertEqual(1, self.manager.count_networks())

    graph_copy = graph.copy()
    graph_copy.version = '1.0.1'
    network_copy = self.manager.insert_graph(graph_copy)

    time.sleep(1)  # Sleep so the first graph always definitely goes in first

    self.assertNotEqual(network.id, network_copy.id)

    self.assertTrue(
        self.manager.has_name_version(graph_copy.name, graph_copy.version))
    self.assertFalse(self.manager.has_name_version('wrong name', '0.1.2'))
    self.assertFalse(
        self.manager.has_name_version(graph_copy.name, '0.1.2'))
    self.assertFalse(
        self.manager.has_name_version('wrong name', graph_copy.version))

    self.assertEqual(2, self.manager.count_networks())

    self.assertEqual(
        '1.0.1',
        self.manager.get_most_recent_network_by_name(graph.name).version)

    # -1 is a deliberately nonexistent id; only the two real ids should match
    query_ids = {-1, network.id, network_copy.id}
    query_networks_result = self.manager.get_networks_by_ids(query_ids)
    self.assertEqual(2, len(query_networks_result))
    # Renamed the comprehension variable so it no longer shadows `network`
    self.assertEqual({network.id, network_copy.id},
                     {result.id for result in query_networks_result})

    expected_versions = {'1.0.1', '1.0.0'}
    self.assertEqual(expected_versions,
                     set(self.manager.get_network_versions(graph.name)))

    exact_name_version = from_database(graph.name, graph.version,
                                       manager=self.manager)
    self.assertEqual(graph.name, exact_name_version.name)
    self.assertEqual(graph.version, exact_name_version.version)

    exact_name_version = from_database(graph.name, '1.0.1',
                                       manager=self.manager)
    self.assertEqual(graph.name, exact_name_version.name)
    self.assertEqual('1.0.1', exact_name_version.version)

    most_recent_version = from_database(graph.name, manager=self.manager)
    self.assertEqual(graph.name, most_recent_version.name)
    # Fixed copy/paste bug: this previously re-asserted
    # exact_name_version.version, leaving the most-recent lookup's
    # version untested.
    self.assertEqual('1.0.1', most_recent_version.version)

    recent_networks = list(self.manager.list_recent_networks()
                           )  # just try it to see if it fails
    self.assertIsNotNone(recent_networks)
    self.assertEqual([(network.name, '1.0.1')],
                     [(n.name, n.version) for n in recent_networks])
    self.assertEqual('1.0.1', recent_networks[0].version)
Transphosphorylation(egfr_dimer, 'Y', '1173'), ] ev = Evidence('assertion', 'assertion', 'assertion', 'assertion') for stmt in stmts: stmt.evidence = [ev] model_description = 'Test of INDRA Statement assembly into PyBEL.' print("Assembling to PyBEL...") pba = PybelAssembler( stmts, name='INDRA_PyBEL_test', description=model_description, authors='John Bachman', ) belgraph = pba.make_model() # Write to BEL file #pybel.to_bel_path(belgraph, 'simple_pybel.bel') # Upload to PyBEL web #with open('pybel_model.json', 'wt') as f: # pybel.to_json_file(pba.model, f) #url = 'https://pybel.scai.fraunhofer.de/api/receive' #headers = {'content-type': 'application/json'} #requests.post(url, json=pybel.to_json(pba.model), headers=headers) # Put in local database pybel.to_database(pba.model)
def convert_paths(paths, connection=None, upload=False, pickle=False,
                  canonicalize=True, infer_central_dogma=True,
                  enrich_citations=False, send=False, version_in_path=False,
                  **kwargs):
    """Recursively parses and either uploads/pickles graphs in a given set of files

    :param iter[str] paths: The paths to convert
    :param connection: The connection
    :type connection: None or str or pybel.manager.Manager
    :param bool upload: Should the networks be uploaded to the cache?
    :param bool pickle: Should the networks be saved as pickles?
    :param bool canonicalize: Calculate canonical nodes?
    :param bool infer_central_dogma: Should the central dogma be inferred for all proteins, RNAs, and miRNAs
    :param bool enrich_citations: Should the citations be enriched using Entrez Utils?
    :param bool send: Send to PyBEL Web?
    :param bool version_in_path: Add the current pybel version to the pathname
    :param kwargs: Parameters to pass to :func:`pybel.from_path`
    :return: A list of (path, exception) pairs for the files that failed to parse
    """
    manager = Manager.ensure(connection)

    failures = []

    for path in paths:
        log.info('parsing: %s', path)

        try:
            graph = from_path(path, manager=manager, **kwargs)
        except Exception as e:
            # Best-effort batch conversion: record the failure and move on
            log.exception('problem parsing %s', path)
            failures.append((path, e))
            continue

        if canonicalize:
            add_canonical_names(graph)

        if infer_central_dogma:
            infer_central_dogma_mutator(graph)

        if enrich_citations:
            enrich_pubmed_citations(graph=graph, manager=manager)

        if upload:
            to_database(graph, connection=manager, store_parts=True)

        if pickle:
            # Strip the '.bel' suffix only when it is actually present.
            # The previous code unconditionally chopped the last four
            # characters, mangling names of files without that extension.
            if path.endswith('.bel'):
                name = path[:-len('.bel')]
            else:
                name = path

            if version_in_path:
                new_path = '{}-{}.gpickle'.format(name, get_pybel_version())
            else:
                new_path = '{}.gpickle'.format(name)

            to_pickle(graph, new_path)
            log.info('output pickle: %s', new_path)

        if send:
            response = to_web(graph)
            log.info('sent to PyBEL Web with response: %s', response.json())

    return failures
def upload_bel(self, connection=None):
    """Convert this object to a BEL graph and store it in the database.

    :param connection: Optional database connection string or manager,
        passed through to :func:`pybel.to_database`
    """
    pybel.to_database(self.to_bel_graph(), connection=connection)