def load_hgram_data_to_json():
    """Load the latest hgram matrix from its text file and export it as JSON.

    Reads the cumulative-probability gene/dataset matrix via
    ``Network.load_hgram`` and writes the network's 'dat' representation to
    ``hgram_data_latest/hgram_latest.json``.
    """
    from d3_clustergram_class import Network

    # A single instance is all that is needed; the previous
    # deepcopy(Network()) was created and immediately discarded by a
    # re-assignment, so it (and the deepcopy import) has been removed.
    hgram = Network()
    print(hgram.__doc__)

    print('\n\tload matrix clustergram')
    # use hgram method to load Andrew data
    hgram.load_hgram('hgram_data_latest/gene_dataset_cumulprobs_20150814.txt')

    # export dictionary and save to file (return value was unused, so it is
    # no longer bound to a name)
    hgram.write_json_to_file('dat', 'hgram_data_latest/hgram_latest.json')
def main():
    """Demo: a str(tuple) dictionary key survives a trip through json.dumps."""
    import json
    from d3_clustergram_class import Network

    net = Network()

    # build a one-entry dict keyed on the string form of a tuple
    info = {}
    key = str((1, 1))
    info[key] = 1

    print(info[key])
    print(info)
    print(type(info))

    # serialize the dict and inspect the resulting string
    serialized = json.dumps(info)
    print(serialized)
    print(type(serialized))
def make_ldr_clust():
    """Build and export the LDR assay / cell-line clustergram.

    Loads ``ldr_mat.json`` (full matrix 'mat', released/unreleased matrices
    under 'rl'->'t'/'f', node lists under 'nodes', and per-cell pert data
    under 'perts'), keeps only the released matrix for visualization,
    attaches pert annotations to ``mat_info``, filters and clusters the
    network, and writes the viz JSON for D3.
    """
    import json_scripts
    import numpy as np
    from d3_clustergram_class import Network
    from ast import literal_eval

    # load LDR data - stored as:
    # released status (rl)
    # nodes, and mat
    ldr = json_scripts.load_to_dict('ldr_mat.json')
    print('\nload ldr_mat.json with perts')
    print(ldr.keys())

    ldr['mat'] = np.asarray(ldr['mat'])
    ldr['rl']['t'] = np.asarray(ldr['rl']['t'])
    ldr['rl']['f'] = np.asarray(ldr['rl']['f'])

    # sanity-check sums and shapes of all / released (t) / unreleased (f)
    print('sum all \t' + str(np.sum(ldr['mat'])))
    print('sum yes \t' + str(np.sum(ldr['rl']['t'])))
    print('sum no \t' + str(np.sum(ldr['rl']['f'])))
    print(len(ldr['nodes']['as']))
    print(len(ldr['nodes']['cl']))
    print(ldr['mat'].shape)
    print('\n')
    print('size all \t' + str(ldr['mat'].shape))
    print('size yes \t' + str(ldr['rl']['t'].shape))
    print('size no \t' + str(ldr['rl']['f'].shape))
    print('\n')
    print('sum all \t' + str(np.sum(ldr['mat'])))
    print('sum yes \t' + str(np.sum(ldr['rl']['t'])))
    print('sum no \t' + str(np.sum(ldr['rl']['f'])))
    print('total yes/no:\t' + str(np.sum(ldr['rl']['t']) + np.sum(ldr['rl']['f'])))

    # define nodes: unfiltered
    # NOTE(review): rows come from nodes['as'] and columns from nodes['cl']
    # (presumably assays and cell lines) — the original inline comments had
    # these labels swapped; verify against the data file.
    nodes_uf = {}
    nodes_uf['row'] = ldr['nodes']['as']
    nodes_uf['col'] = ldr['nodes']['cl']

    # initialize a new network class
    ##################################
    net = Network()
    net.dat['nodes']['row'] = nodes_uf['row']
    net.dat['nodes']['col'] = nodes_uf['col']

    # only include released data in visualization
    net.dat['mat'] = ldr['rl']['t']

    # add perts as mat_info
    ############################
    print('\nperts')
    net.dat['mat_info'] = {}

    # initialize mat_info: an empty dict for every (row, col) cell
    for i in range(len(net.dat['nodes']['row'])):
        for j in range(len(net.dat['nodes']['col'])):
            net.dat['mat_info'][str((i, j))] = {}

    # fill in pert data; keys of ldr['perts'] are string-encoded
    # (row_name, col_name) tuples, so decode them with literal_eval
    # (fixes the original's shadowing of the loop variable and its
    # redundant double str() conversion of the key)
    for pert_key, pert_data in ldr['perts'].items():
        inst_row, inst_col = literal_eval(pert_key)
        index_row = net.dat['nodes']['row'].index(inst_row)
        index_col = net.dat['nodes']['col'].index(inst_col)
        # save to mat_info keyed by string-encoded index tuple
        net.dat['mat_info'][str((index_row, index_col))] = pert_data

    # filter the matrix using cutoff and min_num_meet
    ###################################################
    cutoff_meet = 1
    min_num_meet = 1
    net.filter_network_thresh(cutoff_meet, min_num_meet)

    # cluster
    #############
    cutoff_comp = 3
    min_num_comp = 4
    net.cluster_row_and_col('cos', cutoff_comp, min_num_comp, dendro=False)

    # export data visualization to file
    ######################################
    net.write_json_to_file('viz', 'static/networks/LDR_as_cl_released_only.json', 'indent')