def run(network):
    """Compute a kernel for *network*, serialize it, and sanity-check the output.

    Args:
        network: Path or URL of a tab-separated SIF file whose columns are
            source / interaction / target.
    """
    generator = KernelGenerator()

    # First, read network file as table
    small_net_df = pd.read_csv(
        network, sep='\t', names=['source', 'interaction', 'target'])
    print(small_net_df.shape)

    sources = small_net_df['source']
    targets = small_net_df['target']
    # FIX: Series.append was removed in pandas 2.0; pd.concat is the
    # drop-in replacement for stacking two Series.
    merged = pd.concat([sources, targets])
    print(len(merged.unique()))

    # Calculate kernel
    small_kernel = generator.create_kernel(network)

    # Serialize file
    util.write_kernel(small_kernel, 'kernel_out.txt')

    # FIX: the original called self.assertIsNotNone/assertTrue, but this
    # function has no `self` parameter, so every call raised NameError.
    # Plain asserts keep the same checks without changing the signature.
    assert small_kernel is not None
    assert isinstance(small_kernel, Kernel)
    assert small_kernel.kernel is not None
    assert small_kernel.labels is not None

    # Round-trip: the serialized kernel must parse back as a labeled matrix.
    df = pd.read_csv('kernel_out.txt', sep='\t', index_col=0)
    assert df is not None
    print(df.shape)
def run(self, data):
    """Compute a kernel for the network named in *data*, upload it to the
    kernel file server, and record the mapping in Redis.

    Args:
        data: Mapping that must contain 'network_url', the SIF file location.

    Returns:
        dict with 'kernel_id', 'network', and 'kernel_file' entries.
    """
    # Parse input data
    sif_url = data['network_url']

    kernel = KernelGenerator().create_kernel(sif_url)
    logging.debug('========== Kernel computation finished =========')

    # Push the serialized kernel to the file server; the response carries
    # the server-assigned file id.
    response = client.post(
        KERNEL_FILE_SERVER + 'data',
        data=util.get_kernel_as_string(kernel),
        stream=True)
    file_id = response.json()['fileId']

    # Register kernel information
    kernel_id = str(uuid.uuid4())
    kernel_file_url = KERNEL_FILE_SERVER + 'data/' + file_id
    self.redis_conn.hset('kernels', kernel_id, kernel_file_url)
    self.redis_conn.hset('kernel2network', kernel_id, sif_url)

    logging.debug('Kernel File Server response Data = ' + str(response.json()))

    return {
        'kernel_id': kernel_id,
        'network': sif_url,
        'kernel_file': kernel_file_url
    }
def run(network):
    """Generate a kernel from *network*, write it to disk, and verify it.

    Args:
        network: Path or URL of a tab-separated SIF file with columns
            source / interaction / target.
    """
    generator = KernelGenerator()

    # First, read network file as table
    small_net_df = pd.read_csv(
        network, sep='\t', names=['source', 'interaction', 'target'])
    print(small_net_df.shape)

    sources = small_net_df['source']
    targets = small_net_df['target']
    # FIX: Series.append was removed in pandas 2.0; use pd.concat to
    # stack the two Series instead.
    merged = pd.concat([sources, targets])
    print(len(merged.unique()))

    # Calculate kernel
    small_kernel = generator.create_kernel(network)

    # Serialize file
    util.write_kernel(small_kernel, 'kernel_out.txt')

    # FIX: the original used self.assertIsNotNone/assertTrue, but there is
    # no `self` parameter in this signature, so those calls raised
    # NameError. Equivalent plain asserts preserve the checks.
    assert small_kernel is not None
    assert isinstance(small_kernel, Kernel)
    assert small_kernel.kernel is not None
    assert small_kernel.labels is not None

    # The serialized file must round-trip into a labeled DataFrame.
    df = pd.read_csv('kernel_out.txt', sep='\t', index_col=0)
    assert df is not None
    print(df.shape)
def test_find_sub_network2(self):
    """Smoke-test SubNetworkFinder: kernel + SIF in, list of edges out."""
    print('\n---------- Sub Network Finder tests 2 start -----------\n')

    from hdsubnetfinder.subnetwork.sub_network_finder import SubNetworkFinder
    import hdsubnetfinder.subnetwork.network_util as util
    from hdsubnetfinder.kernel.kernel_generator import KernelGenerator

    # Build the kernel and the raw network from the same remote SIF file.
    kernel = KernelGenerator().create_kernel(network_url=NETWORK_URL)
    sif_network = util.read_sif(file_url=NETWORK_URL)
    finder = SubNetworkFinder(network=sif_network, kernel=kernel)

    gene_ids = ["NRAS", "KRAS", "MAPK1"]
    sub_network = finder.get_sub_network(gene_ids)

    self.assertIsNotNone(sub_network)
    print(len(sub_network))
    self.assertEqual(list, type(sub_network))

    print('\n---------- finder tests2 finished! -----------\n')
def run(self, data):
    """Build a kernel from the given network URL, store it on the kernel
    file server, and register both ids in Redis.

    Args:
        data: Mapping containing 'network_url' — location of the SIF file.

    Returns:
        dict with keys 'kernel_id', 'network', and 'kernel_file'.
    """
    # Parse input data
    sif_url = data["network_url"]
    generator = KernelGenerator()
    kernel = generator.create_kernel(sif_url)
    logging.debug("========== Kernel computation finished =========")

    # Upload the serialized kernel; the file server replies with its id.
    upload = client.post(
        KERNEL_FILE_SERVER + "data",
        data=util.get_kernel_as_string(kernel),
        stream=True,
    )
    new_file_id = upload.json()["fileId"]

    # Register kernel information
    kernel_id = str(uuid.uuid4())
    kernel_file_url = KERNEL_FILE_SERVER + "data/" + new_file_id
    self.redis_conn.hset("kernels", kernel_id, kernel_file_url)
    self.redis_conn.hset("kernel2network", kernel_id, sif_url)

    logging.debug("Kernel File Server response Data = " + str(upload.json()))

    result = {
        "kernel_id": kernel_id,
        "network": sif_url,
        "kernel_file": kernel_file_url,
    }
    return result